From 98ff63d44913ef9230f8a71d4a39a4a772b81884 Mon Sep 17 00:00:00 2001
From: Hari Dara
Date: Fri, 17 Jan 2025 12:23:18 +0530
Subject: [PATCH 01/70] PBE: POC for initializing STK and a default
implementation for key provider
---
.gitignore | 4 +
.../org/apache/hadoop/hbase/HConstants.java | 12 ++
.../hadoop/hbase/io/crypto/Encryption.java | 3 +
.../hbase/io/crypto/KeyStoreKeyProvider.java | 10 +-
.../hadoop/hbase/io/crypto/PBEKeyData.java | 70 +++++++
.../hbase/io/crypto/PBEKeyProvider.java | 29 +++
.../hadoop/hbase/io/crypto/PBEKeyStatus.java | 28 +++
.../io/crypto/PBEKeyStoreKeyProvider.java | 80 +++++++
.../apache/hadoop/hbase/util/GsonUtil.java | 11 +
.../io/crypto/TestKeyStoreKeyProvider.java | 65 ++++--
.../io/crypto/TestPBEKeyStoreKeyProvider.java | 147 +++++++++++++
.../hbase/master/ClusterKeyManager.java | 196 ++++++++++++++++++
.../apache/hadoop/hbase/master/HMaster.java | 4 +
.../hadoop/hbase/master/KeyMetaSchema.java | 27 +++
.../hadoop/hbase/master/MasterFileSystem.java | 8 +
.../hadoop/hbase/master/SplitWALManager.java | 1 -
16 files changed, 672 insertions(+), 23 deletions(-)
create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java
create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java
create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java
create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java
create mode 100644 hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java
create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java
create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/master/KeyMetaSchema.java
diff --git a/.gitignore b/.gitignore
index 52d169dd5ad7..89e60343cc28 100644
--- a/.gitignore
+++ b/.gitignore
@@ -25,3 +25,7 @@ linklint/
**/*.log
tmp
**/.flattened-pom.xml
+.*.sw*
+ID
+filenametags
+tags
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index eba3eb657ea5..670bda523919 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -1192,6 +1192,11 @@ public enum OperationStatusCode {
/** Temporary directory used for table creation and deletion */
public static final String HBASE_TEMP_DIRECTORY = ".tmp";
+ /**
+ * Directory used for storing master keys for the cluster
+ */
+ public static final String CLUSTER_KEYS_DIRECTORY = ".cluster_keys";
+ public static final String CLUSTER_KEY_FILE_PREFIX = "cluster_key.";
/**
* The period (in milliseconds) between computing region server point in time metrics
*/
@@ -1284,6 +1289,13 @@ public enum OperationStatusCode {
/** Configuration key for the name of the master key for the cluster, a string */
public static final String CRYPTO_MASTERKEY_NAME_CONF_KEY = "hbase.crypto.master.key.name";
+ public static final String CRYPTO_PBE_MASTERKEY_NAME_CONF_KEY =
+ "hbase.crypto.pbe.master.key.name";
+
+ public static final String CRYPTO_PBE_ENABLED_CONF_KEY = "hbase.crypto.pbe.enabled";
+
+ public static final String CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX = "hbase.crypto.pbe.prefix.";
+
/** Configuration key for the name of the alternate master key for the cluster, a string */
public static final String CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY =
"hbase.crypto.master.alternate.key.name";
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
index 13e335b82ee3..5dac1af2c27a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
@@ -561,6 +561,9 @@ public static KeyProvider getKeyProvider(Configuration conf) {
provider = (KeyProvider) ReflectionUtils
.newInstance(getClassLoaderForClass(KeyProvider.class).loadClass(providerClassName), conf);
provider.init(providerParameters);
+ if (provider instanceof PBEKeyProvider) {
+ ((PBEKeyProvider) provider).initConfig(conf);
+ }
if (LOG.isDebugEnabled()) {
LOG.debug("Installed " + providerClassName + " into key provider cache");
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java
index 604bede13b20..c401d3b3f6b9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java
@@ -76,6 +76,8 @@
@InterfaceAudience.Public
public class KeyStoreKeyProvider implements KeyProvider {
+ private static final char[] NO_PASSWORD = new char[0];
+
protected KeyStore store;
protected char[] password; // can be null if no password
protected Properties passwordFile; // can be null if no file provided
@@ -172,9 +174,15 @@ protected char[] getAliasPassword(String alias) {
@Override
public Key getKey(String alias) {
+ // First try with no password, as it is more common to have a password only for the store.
try {
- return store.getKey(alias, getAliasPassword(alias));
+ return store.getKey(alias, NO_PASSWORD);
} catch (UnrecoverableKeyException e) {
+ try {
+ return store.getKey(alias, getAliasPassword(alias));
+ } catch (UnrecoverableKeyException|NoSuchAlgorithmException|KeyStoreException e2) {
+ // Ignore.
+ }
throw new RuntimeException(e);
} catch (KeyStoreException e) {
throw new RuntimeException(e);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java
new file mode 100644
index 000000000000..f69201c10a7e
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io.crypto;
+
+import java.security.Key;
+
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Immutable holder for a PBE key together with its prefix, lifecycle status and
+ * provider-specific metadata.
+ */
+@InterfaceAudience.Public
+public class PBEKeyData {
+  private final byte[] pbe_prefix;
+  private final Key theKey;
+  private final PBEKeyStatus keyStatus;
+  private final String keyMetadata;
+
+  /**
+   * @param pbe_prefix  the raw PBE prefix this key belongs to
+   * @param theKey      the key material
+   * @param keyStatus   the lifecycle status of the key
+   * @param keyMetadata provider-specific metadata identifying this key
+   */
+  public PBEKeyData(byte[] pbe_prefix, Key theKey, PBEKeyStatus keyStatus, String keyMetadata) {
+    this.pbe_prefix = pbe_prefix;
+    this.theKey = theKey;
+    this.keyStatus = keyStatus;
+    this.keyMetadata = keyMetadata;
+  }
+
+  /** Returns the raw PBE prefix this key belongs to. */
+  public byte[] getPbe_prefix() {
+    return pbe_prefix;
+  }
+
+  /** Returns the key material. */
+  public Key getTheKey() {
+    return theKey;
+  }
+
+  /** Returns the lifecycle status of the key. */
+  public PBEKeyStatus getKeyStatus() {
+    return keyStatus;
+  }
+
+  /** Returns the provider-specific metadata for this key. */
+  public String getKeyMetadata() {
+    return keyMetadata;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    PBEKeyData that = (PBEKeyData) o;
+    // EqualsBuilder compares byte[] contents element-wise, not by reference.
+    return new EqualsBuilder().append(pbe_prefix, that.pbe_prefix).append(theKey, that.theKey)
+      .append(keyStatus, that.keyStatus).append(keyMetadata, that.keyMetadata).isEquals();
+  }
+
+  @Override
+  public int hashCode() {
+    return new HashCodeBuilder(17, 37).append(pbe_prefix).append(theKey).append(keyStatus)
+      .append(keyMetadata).toHashCode();
+  }
+}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java
new file mode 100644
index 000000000000..44e6945a2ff3
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyProvider.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io.crypto;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * A {@link KeyProvider} that additionally manages PBE (prefix based encryption) keys and the
+ * cluster master key.
+ */
+@InterfaceAudience.Public
+public interface PBEKeyProvider extends KeyProvider {
+  /**
+   * Supplies the cluster configuration to the provider, in addition to {@code init}.
+   * @param conf the cluster configuration
+   */
+  void initConfig(Configuration conf);
+
+  /**
+   * Returns the key data for the cluster master key.
+   * @param clusterId the id of this cluster
+   */
+  PBEKeyData getClusterKey(byte[] clusterId);
+
+  /**
+   * Returns the key data for the given PBE prefix, or null if none is available.
+   * @param pbe_prefix the raw PBE prefix
+   */
+  PBEKeyData getPBEKey(byte[] pbe_prefix);
+
+  /**
+   * Resolves a key from its previously generated metadata, or null if it cannot be found.
+   * @param keyMetaData the metadata originally returned along with the key
+   */
+  PBEKeyData unwrapKey(String keyMetaData);
+}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java
new file mode 100644
index 000000000000..853ab59e2c2b
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStatus.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io.crypto;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Lifecycle status of a PBE key.
+ */
+@InterfaceAudience.Public
+public enum PBEKeyStatus {
+  /** The key is active and usable. */
+  ACTIVE,
+  /** The key has been marked inactive (e.g. via configuration). */
+  INACTIVE,
+  /** The key could not be retrieved or validated. */
+  FAILED,
+  /** The key has been administratively disabled. */
+  DISABLED
+}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java
new file mode 100644
index 000000000000..a44afa2e6475
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io.crypto;
+
+import java.security.Key;
+import java.util.Base64;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.util.GsonUtil;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * A {@link PBEKeyProvider} backed by a Java keystore. Key aliases for the cluster master key and
+ * for individual PBE prefixes are resolved through configuration.
+ */
+@InterfaceAudience.Public
+public class PBEKeyStoreKeyProvider extends KeyStoreKeyProvider implements PBEKeyProvider {
+  public static final String KEY_METADATA_ALIAS = "KeyAlias";
+  public static final String KEY_METADATA_PREFIX = "PBE_PREFIX";
+
+  private Configuration conf;
+
+  @Override
+  public void initConfig(Configuration conf) {
+    this.conf = conf;
+  }
+
+  @Override
+  public PBEKeyData getClusterKey(byte[] clusterId) {
+    checkConfig();
+    String masterKeyAlias = conf.get(HConstants.CRYPTO_PBE_MASTERKEY_NAME_CONF_KEY);
+    if (masterKeyAlias == null) {
+      throw new RuntimeException("No alias configured for master key");
+    }
+    Key key = getKey(masterKeyAlias);
+    if (key == null) {
+      throw new RuntimeException("Unable to find cluster key with alias: " + masterKeyAlias);
+    }
+    // Encode clusterId too for consistency with that of PBE prefixes.
+    String keyMetadata =
+      generateKeyMetadata(masterKeyAlias, Base64.getEncoder().encodeToString(clusterId));
+    return new PBEKeyData(clusterId, key, PBEKeyStatus.ACTIVE, keyMetadata);
+  }
+
+  @Override
+  public PBEKeyData getPBEKey(byte[] pbe_prefix) {
+    checkConfig();
+    String encodedPrefix = Base64.getEncoder().encodeToString(pbe_prefix);
+    String aliasConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + ".alias";
+    String keyAlias = conf.get(aliasConfKey);
+    if (keyAlias == null) {
+      return null;
+    }
+    return unwrapKey(generateKeyMetadata(keyAlias, encodedPrefix));
+  }
+
+  @Override
+  public PBEKeyData unwrapKey(String keyMetadataStr) {
+    checkConfig(); // conf is read below; fail fast if initConfig was never called
+    @SuppressWarnings("unchecked")
+    Map<String, String> keyMetadata =
+      GsonUtil.getDefaultInstance().fromJson(keyMetadataStr, HashMap.class);
+    String alias = keyMetadata.get(KEY_METADATA_ALIAS);
+    Key key = getKey(alias);
+    if (key == null) {
+      return null;
+    }
+    String encodedPrefix = keyMetadata.get(KEY_METADATA_PREFIX);
+    String activeStatusConfKey =
+      HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + ".active";
+    boolean isActive = conf.getBoolean(activeStatusConfKey, true);
+    return new PBEKeyData(Base64.getDecoder().decode(encodedPrefix), key,
+      isActive ? PBEKeyStatus.ACTIVE : PBEKeyStatus.INACTIVE, keyMetadataStr);
+  }
+
+  /** Serializes the alias and encoded prefix into the JSON key metadata document. */
+  private String generateKeyMetadata(String aliasName, String encodedPrefix) {
+    Map<String, String> metadata = new HashMap<>();
+    metadata.put(KEY_METADATA_ALIAS, aliasName);
+    metadata.put(KEY_METADATA_PREFIX, encodedPrefix);
+    return GsonUtil.getDefaultInstance().toJson(metadata);
+  }
+
+  /** Ensures {@link #initConfig(Configuration)} was called with a non-null configuration. */
+  private void checkConfig() {
+    if (conf == null) {
+      throw new IllegalStateException("initConfig is not called or config is null");
+    }
+  }
+}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java
index e592b1f935a1..adb6536efbb1 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/GsonUtil.java
@@ -19,6 +19,7 @@
import java.io.IOException;
import java.util.concurrent.atomic.LongAdder;
+import org.apache.hbase.thirdparty.com.google.gson.Gson;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.gson.GsonBuilder;
@@ -33,6 +34,8 @@
@InterfaceAudience.Private
public final class GsonUtil {
+ private static Gson INSTANCE;
+
private GsonUtil() {
}
@@ -62,4 +65,12 @@ public LongAdder read(JsonReader in) throws IOException {
public static GsonBuilder createGsonWithDisableHtmlEscaping() {
return createGson().disableHtmlEscaping();
}
+
+ public static Gson getDefaultInstance() {
+ if (INSTANCE == null) {
+ INSTANCE = createGson().create();
+
+ }
+ return INSTANCE;
+ }
}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
index 581681988c28..de91aa904581 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
@@ -26,6 +26,8 @@
import java.security.Key;
import java.security.KeyStore;
import java.security.MessageDigest;
+import java.util.Arrays;
+import java.util.Collection;
import java.util.Properties;
import javax.crypto.spec.SecretKeySpec;
import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -33,12 +35,15 @@
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.BeforeClass;
+import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
@Category({ MiscTests.class, SmallTests.class })
+@RunWith(Parameterized.class)
public class TestKeyStoreKeyProvider {
@ClassRule
@@ -53,14 +58,33 @@ public class TestKeyStoreKeyProvider {
static File storeFile;
static File passwordFile;
- @BeforeClass
- public static void setUp() throws Exception {
+ protected KeyProvider provider;
+
+ @Parameterized.Parameter(0)
+ public boolean withPasswordOnAlias;
+ @Parameterized.Parameter(1)
+ public boolean withPasswordFile;
+
+ @Parameterized.Parameters(name = "withPasswordOnAlias={0} withPasswordFile={1}")
+ public static Collection
+ */
+@CoreCoprocessor @InterfaceAudience.Private
+public class PBEKeymetaServiceEndpoint implements MasterCoprocessor {
+ private static final Logger LOG = LoggerFactory.getLogger(PBEKeymetaServiceEndpoint.class);
private MasterServices master = null;
- private KeyMetaManager keyMetaManager;
private final PBEAdminService pbeAdminService = new KeyMetaAdminServiceImpl();
- @Override public void start(CoprocessorEnvironment env) throws IOException {
+ /**
+ * Starts the coprocessor by initializing the reference to the {@link org.apache.hadoop.hbase.master.MasterServices}
+ * instance.
+ *
+ * @param env The coprocessor environment.
+ * @throws IOException If an error occurs during initialization.
+ */
+ @Override
+ public void start(CoprocessorEnvironment env) throws IOException {
if (!(env instanceof HasMasterServices)) {
throw new IOException("Does not implement HMasterServices");
}
master = ((HasMasterServices) env).getMasterServices();
- keyMetaManager = new KeyMetaManager(master);
}
+ /**
+ * Returns an iterable of the available coprocessor services, which includes the
+ * {@link org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminService} implemented by
+ * {@link org.apache.hadoop.hbase.keymeta.PBEKeymetaServiceEndpoint.KeyMetaAdminServiceImpl}.
+ *
+ * @return An iterable of the available coprocessor services.
+ */
@Override
public Iterable getServices() {
return Collections.singleton(pbeAdminService);
}
+ /**
+ * The implementation of the {@link org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminService}
+ * interface, which provides the actual method implementations for enabling PBE.
+ */
private class KeyMetaAdminServiceImpl extends PBEAdminService {
+ /**
+ * Enables PBE for a given tenant and namespace, as specified in the provided request.
+ *
+ * @param controller The RPC controller.
+ * @param request The request containing the tenant and table specifications.
+ * @param done The callback to be invoked with the response.
+ */
@Override
public void enablePBE(RpcController controller, PBEAdminRequest request,
- RpcCallback done) {
+ RpcCallback done) {
PBEAdminResponse.Builder builder =
- PBEAdminResponse.newBuilder().setPbePrefix(request.getPbePrefix());
+ PBEAdminResponse.newBuilder().setPbePrefix(request.getPbePrefix())
+ .setKeyNamespace(request.getKeyNamespace());
byte[] pbe_prefix = null;
try {
pbe_prefix = Base64.getDecoder().decode(request.getPbePrefix());
- }
- catch (IllegalArgumentException e) {
+ } catch (IllegalArgumentException e) {
builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED);
CoprocessorRpcUtils.setControllerException(controller, new IOException(
"Failed to decode specified prefix as Base64 string: " + request.getPbePrefix(), e));
}
if (pbe_prefix != null) {
try {
- PBEKeyStatus pbeKeyStatus = keyMetaManager.enablePBE(request.getPbePrefix());
+ PBEKeyStatus pbeKeyStatus = master.getPBEKeymetaAdmin()
+ .enablePBE(request.getPbePrefix(), request.getKeyNamespace());
builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.valueOf(pbeKeyStatus.getVal()));
} catch (IOException e) {
CoprocessorRpcUtils.setControllerException(controller, e);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
similarity index 55%
rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaTableAccessor.java
rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
index fa5ead093ebc..f42370c9fc48 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/KeyMetaTableAccessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
@@ -23,8 +23,13 @@
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.io.crypto.PBEKeyData;
import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus;
import org.apache.hadoop.hbase.util.Bytes;
@@ -32,11 +37,13 @@
import org.apache.yetus.audience.InterfaceAudience;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.List;
+/**
+ * Accessor for PBE keymeta table.
+ */
@InterfaceAudience.Private
-public class KeyMetaTableAccessor extends PBEKeyManager {
+public class PBEKeymetaTableAccessor extends PBEKeyManager {
private static final String KEY_META_INFO_FAMILY_STR = "info";
public static final byte[] KEY_META_INFO_FAMILY = Bytes.toBytes(KEY_META_INFO_FAMILY_STR);
@@ -44,9 +51,6 @@ public class KeyMetaTableAccessor extends PBEKeyManager {
public static final TableName KEY_META_TABLE_NAME =
TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "keymeta");
- public static final String PBE_PREFIX_QUAL_NAME = "pbe_prefix";
- public static final byte[] PBE_PREFIX_QUAL_BYTES = Bytes.toBytes(PBE_PREFIX_QUAL_NAME);
-
public static final String DEK_METADATA_QUAL_NAME = "dek_metadata";
public static final byte[] DEK_METADATA_QUAL_BYTES = Bytes.toBytes(DEK_METADATA_QUAL_NAME);
@@ -65,24 +69,55 @@ public class KeyMetaTableAccessor extends PBEKeyManager {
public static final String KEY_STATUS_QUAL_NAME = "key_status";
public static final byte[] KEY_STATUS_QUAL_BYTES = Bytes.toBytes(KEY_STATUS_QUAL_NAME);
- public KeyMetaTableAccessor(Server server) {
+ public PBEKeymetaTableAccessor(Server server) {
super(server);
}
public void addKey(PBEKeyData keyData) throws IOException {
- long refreshTime = EnvironmentEdgeManager.currentTime();
- final Put putForPrefix = addMutationColumns(new Put(keyData.getPbe_prefix()), keyData,
- refreshTime);
final Put putForMetadata = addMutationColumns(new Put(constructRowKeyForMetadata(keyData)),
- keyData, refreshTime);
+ keyData);
+
+ Connection connection = server.getConnection();
+ try (Table table = connection.getTable(KEY_META_TABLE_NAME)) {
+ table.put(putForMetadata);
+ }
+ }
+ public List getActiveKeys(byte[] pbePrefix, String keyNamespace) throws IOException {
Connection connection = server.getConnection();
+ byte[] prefixForScan = Bytes.add(Bytes.toBytes(pbePrefix.length), pbePrefix,
+ Bytes.toBytes(keyNamespace));
try (Table table = connection.getTable(KEY_META_TABLE_NAME)) {
- table.put(Arrays.asList(putForPrefix, putForMetadata));
+ PrefixFilter prefixFilter = new PrefixFilter(prefixForScan);
+ Scan scan = new Scan();
+ scan.setFilter(prefixFilter);
+ scan.addFamily(KEY_META_INFO_FAMILY);
+
+ ResultScanner scanner = table.getScanner(scan);
+ List activeKeys = new ArrayList<>();
+ for (Result result : scanner) {
+ PBEKeyData keyData = parseFromResult(pbePrefix, keyNamespace, result);
+ if (keyData.getKeyStatus() == PBEKeyStatus.ACTIVE) {
+ activeKeys.add(keyData);
+ }
+ }
+
+ return activeKeys;
}
}
- private Put addMutationColumns(Put put, PBEKeyData keyData, long refreshTime) {
+ public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata)
+ throws IOException {
+ Connection connection = server.getConnection();
+ try (Table table = connection.getTable(KEY_META_TABLE_NAME)) {
+ byte[] rowKey = constructRowKeyForMetadata(pbePrefix, keyNamespace,
+ PBEKeyData.makeMetadataHash(keyMetadata));
+ Result result = table.get(new Get(rowKey));
+ return parseFromResult(pbePrefix, keyNamespace, result);
+ }
+ }
+
+ private Put addMutationColumns(Put put, PBEKeyData keyData) {
if (keyData.getTheKey() != null) {
put.addColumn(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES,
Bytes.toBytes(keyData.getKeyChecksum()));
@@ -92,25 +127,39 @@ private Put addMutationColumns(Put put, PBEKeyData keyData, long refreshTime) {
.addColumn(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES, keyData.getKeyMetadata().getBytes())
//.addColumn(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES, null)
//.addColumn(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES, null)
- .addColumn(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES, Bytes.toBytes(refreshTime))
+ .addColumn(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES,
+ Bytes.toBytes(keyData.getRefreshTimestamp()))
.addColumn(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES,
new byte[] { keyData.getKeyStatus().getVal() })
;
}
private byte[] constructRowKeyForMetadata(PBEKeyData keyData) {
- byte[] pbePrefix = keyData.getPbe_prefix();
- int prefixLength = pbePrefix.length;
- byte[] keyMetadataHash = keyData.getKeyMetadataHash();
- return Bytes.add(Bytes.toBytes(prefixLength), pbePrefix, keyMetadataHash);
+ return constructRowKeyForMetadata(keyData.getPbe_prefix(), keyData.getKeyNamespace(),
+ keyData.getKeyMetadataHash());
}
- private byte[] extractPBEPrefix(byte[] rowkey) {
- int prefixLength = Bytes.toInt(rowkey);
- return Bytes.copy(rowkey, Bytes.SIZEOF_INT, prefixLength);
+ private static byte[] constructRowKeyForMetadata(byte[] pbePrefix, String keyNamespace,
+ byte[] keyMetadataHash) {
+ int prefixLength = pbePrefix.length;
+ return Bytes.add(Bytes.toBytes(prefixLength), pbePrefix, Bytes.toBytesBinary(keyNamespace),
+ keyMetadataHash);
}
- private byte[] extractKeyMetadataHash(byte[] rowkey, byte[] pbePreefix) {
- return Bytes.copy(rowkey, Bytes.SIZEOF_INT + pbePreefix.length, rowkey.length);
+ private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result result) {
+ if (result == null || result.isEmpty()) {
+ return null;
+ }
+ PBEKeyStatus keyStatus = PBEKeyStatus.forValue(
+ result.getValue(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES)[0]);
+ String dekMetadata = Bytes.toString(result.getValue(KEY_META_INFO_FAMILY,
+ DEK_METADATA_QUAL_BYTES));
+ long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES));
+ byte[] dekChecksum = result.getValue(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES);
+ //byte[] dekWrappedByStk = result.getValue(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES);
+ //byte[] stkChecksum = result.getValue(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES);
+
+ return new PBEKeyData(pbePrefix, keyNamespace, null, keyStatus, dekMetadata,
+ refreshedTimestamp);
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 843cb413b596..6eac904d6e4c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -122,7 +122,7 @@
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-import org.apache.hadoop.hbase.keymeta.KeyMetaMasterService;
+import org.apache.hadoop.hbase.keymeta.PBEKeymetaMasterService;
import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
@@ -356,8 +356,8 @@ public class HMaster extends HBaseServerBase implements Maste
// file system manager for the master FS operations
private MasterFileSystem fileSystemManager;
private MasterWalManager walManager;
- private ClusterKeyManager clusterKeyManager;
- private KeyMetaMasterService keyMetaMasterService;
+ private PBEClusterKeyManager pbeClusterKeyManager;
+ private PBEKeymetaMasterService pbeKeymetaMasterService;
// manager to manage procedure-based WAL splitting, can be null if current
// is zk-based WAL splitting. SplitWALManager will replace SplitLogManager
@@ -995,8 +995,8 @@ private void finishActiveMasterInitialization() throws IOException, InterruptedE
ZKClusterId.setClusterId(this.zooKeeper, fileSystemManager.getClusterId());
this.clusterId = clusterId.toString();
- clusterKeyManager = new ClusterKeyManager(this);
- clusterKeyManager.ensureClusterKeyInitialized();
+ pbeClusterKeyManager = new PBEClusterKeyManager(this);
+ pbeClusterKeyManager.ensureClusterKeyInitialized();
// Precaution. Put in place the old hbck1 lock file to fence out old hbase1s running their
// hbck1s against an hbase2 cluster; it could do damage. To skip this behavior, set
@@ -1037,8 +1037,8 @@ private void finishActiveMasterInitialization() throws IOException, InterruptedE
Map, List>> procsByType = procedureExecutor
.getActiveProceduresNoCopy().stream().collect(Collectors.groupingBy(p -> p.getClass()));
- keyMetaMasterService = new KeyMetaMasterService(this);
- keyMetaMasterService.init();
+ pbeKeymetaMasterService = new PBEKeymetaMasterService(this);
+ pbeKeymetaMasterService.init();
// Create Assignment Manager
this.assignmentManager = createAssignmentManager(this, masterRegion);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/KeyMetaSchema.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/KeyMetaSchema.java
deleted file mode 100644
index ce85e11f7518..000000000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/KeyMetaSchema.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.master;
-
-import org.apache.yetus.audience.InterfaceAudience;
-
-/**
- * API for managing the keys in key_meta table.
- */
-@InterfaceAudience.Public
-public interface KeyMetaSchema {
-}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java
similarity index 92%
rename from hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java
rename to hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java
index ed54231e93d7..f3e0f60a8c61 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterKeyManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java
@@ -20,20 +20,19 @@
import java.io.IOException;
import java.util.List;
import java.util.UUID;
-import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.crypto.PBEKeyData;
import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider;
-import org.apache.hadoop.hbase.keymeta.ClusterKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor;
import org.apache.yetus.audience.InterfaceAudience;
import static org.apache.hadoop.hbase.HConstants.CLUSTER_KEY_FILE_PREFIX;
@InterfaceAudience.Private
-public class ClusterKeyManager extends ClusterKeyAccessor {
+public class PBEClusterKeyManager extends PBEClusterKeyAccessor {
private final MasterServices master;
- public ClusterKeyManager(MasterServices master) throws IOException {
+ public PBEClusterKeyManager(MasterServices master) throws IOException {
super(master);
this.master = master;
}
@@ -63,8 +62,8 @@ public PBEKeyData rotateClusterKeyIfChanged() throws IOException {
return null;
}
Path latestFile = getLatestClusterKeyFile();
- String latestKeyMeta = loadKeyMetadata(latestFile);
- return rotateClusterKey(latestKeyMeta);
+ String latestKeyMetadata = loadKeyMetadata(latestFile);
+ return rotateClusterKey(latestKeyMetadata);
}
private PBEKeyData rotateClusterKey(String currentKeyMetadata) throws IOException {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 1801b4d971ed..165b1cd68a34 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -806,8 +806,8 @@ public HRegion(final HRegionFileSystem fs, final WAL wal, final Configuration co
this.conf = new CompoundConfiguration().add(confParam).addBytesMap(htd.getValues());
this.cellComparator = htd.isMetaTable()
|| conf.getBoolean(USE_META_CELL_COMPARATOR, DEFAULT_USE_META_CELL_COMPARATOR)
- ? MetaCellComparator.META_COMPARATOR
- : CellComparatorImpl.COMPARATOR;
+ ? MetaCellComparator.META_COMPARATOR
+ : CellComparatorImpl.COMPARATOR;
this.lock = new ReentrantReadWriteLock(
conf.getBoolean(FAIR_REENTRANT_CLOSE_LOCK, DEFAULT_FAIR_REENTRANT_CLOSE_LOCK));
this.regionLockHolders = new ConcurrentHashMap<>();
@@ -906,9 +906,9 @@ public HRegion(final HRegionFileSystem fs, final WAL wal, final Configuration co
// disable stats tracking system tables, but check the config for everything else
this.regionStatsEnabled = htd.getTableName().getNamespaceAsString()
.equals(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR)
- ? false
- : conf.getBoolean(HConstants.ENABLE_CLIENT_BACKPRESSURE,
- HConstants.DEFAULT_ENABLE_CLIENT_BACKPRESSURE);
+ ? false
+ : conf.getBoolean(HConstants.ENABLE_CLIENT_BACKPRESSURE,
+ HConstants.DEFAULT_ENABLE_CLIENT_BACKPRESSURE);
this.maxCellSize = conf.getLong(HBASE_MAX_CELL_SIZE_KEY, DEFAULT_MAX_CELL_SIZE);
this.miniBatchSize =
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index a4105a31bfac..d80f58edf366 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -120,6 +120,9 @@
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
+import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
+import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminImpl;
import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.mob.MobFileCache;
import org.apache.hadoop.hbase.mob.RSMobFileCleanerChore;
@@ -596,7 +599,6 @@ protected RegionServerCoprocessorHost getCoprocessorHost() {
return getRegionServerCoprocessorHost();
}
- @Override
protected boolean canCreateBaseZNode() {
return !clusterMode();
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
index a4ca20fa7311..1e5f9db2d3c3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
@@ -41,6 +41,8 @@
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.AsyncClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.master.replication.OfflineTableReplicationQueueStorage;
import org.apache.hadoop.hbase.replication.ReplicationException;
import org.apache.hadoop.hbase.replication.ReplicationGroupOffset;
@@ -366,6 +368,14 @@ public ChoreService getChoreService() {
return null;
}
+ @Override public PBEKeyAccessor getPBEKeyAccessor() {
+ return null;
+ }
+
+ @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() {
+ return null;
+ }
+
@Override
public FileSystem getFileSystem() {
return null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
index e78ca7d0cdb7..8ce567d25f12 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
@@ -38,6 +38,8 @@
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.executor.ExecutorService;
import org.apache.hadoop.hbase.favored.FavoredNodesManager;
+import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
import org.apache.hadoop.hbase.master.hbck.HbckChore;
import org.apache.hadoop.hbase.master.janitor.CatalogJanitor;
@@ -116,6 +118,14 @@ public ChoreService getChoreService() {
return null;
}
+ @Override public PBEKeyAccessor getPBEKeyAccessor() {
+ return null;
+ }
+
+ @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() {
+ return null;
+ }
+
@Override
public CatalogJanitor getCatalogJanitor() {
return null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
index a25bae6ec7bd..2b5f138a6a0f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
@@ -52,6 +52,8 @@
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
+import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.mob.MobFileCache;
import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
@@ -556,6 +558,14 @@ public ChoreService getChoreService() {
return null;
}
+ @Override public PBEKeyAccessor getPBEKeyAccessor() {
+ return null;
+ }
+
+ @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() {
+ return null;
+ }
+
@Override
public void updateRegionFavoredNodesMapping(String encodedRegionName,
List favoredNodes) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
index 443019bee808..f151e20bf587 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
@@ -46,6 +46,8 @@
import org.apache.hadoop.hbase.io.hfile.CachedBlock;
import org.apache.hadoop.hbase.io.hfile.ResizableBlockCache;
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
+import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerContext;
import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerResult;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -837,6 +839,14 @@ public ChoreService getChoreService() {
return null;
}
+ @Override public PBEKeyAccessor getPBEKeyAccessor() {
+ return null;
+ }
+
+ @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() {
+ return null;
+ }
+
@Override
public FileSystem getFileSystem() {
return null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
index 58ffdcf91d43..44d35ae442ed 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
@@ -55,6 +55,8 @@
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.ipc.SimpleRpcServer;
+import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.regionserver.RegionServerServices;
import org.apache.hadoop.hbase.security.SecurityInfo;
@@ -350,6 +352,14 @@ public ChoreService getChoreService() {
return null;
}
+ @Override public PBEKeyAccessor getPBEKeyAccessor() {
+ return null;
+ }
+
+ @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() {
+ return null;
+ }
+
@Override
public Connection createConnection(Configuration conf) throws IOException {
return null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java
index 90f4a7555b93..cbf6d0773d3e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java
@@ -26,6 +26,8 @@
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.AsyncClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.slf4j.Logger;
@@ -100,6 +102,14 @@ public ChoreService getChoreService() {
throw new UnsupportedOperationException();
}
+ @Override public PBEKeyAccessor getPBEKeyAccessor() {
+ return null;
+ }
+
+ @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() {
+ return null;
+ }
+
@Override
public FileSystem getFileSystem() {
throw new UnsupportedOperationException();
diff --git a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb
index 4f8d9f1e830f..4a9dfbd7f998 100644
--- a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb
@@ -21,7 +21,7 @@ module Hbase
class PBEAdmin
def initialize(connection)
@connection = connection
- @admin = org.apache.hadoop.hbase.keymeta.KeyMetaAdminClient.new(connection)
+ @admin = org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminClient.new(connection)
@hb_admin = @connection.getAdmin
end
@@ -30,7 +30,10 @@ def close
end
def pbe_enable(pbe_prefix)
- @admin.enablePBE(pbe_prefix)
+ prefixInfo = pbe_prefix.split(':')
+ assert prefixInfo.length <= 2, 'Invalid prefix:namespace format'
+ @admin.enablePBE(prefixInfo[0], prefixInfo.length > 1? prefixInfo[1] :
+ org.apache.hadoop.hbase.io.crypto.PBEKeyData.KEY_NAMESPACE_GLOBAL)
end
end
end
From 2d2cdb5af1b0f3ea9780f53a55b07ea8a1eafe7f Mon Sep 17 00:00:00 2001
From: Hari Dara
Date: Mon, 10 Mar 2025 14:01:57 +0530
Subject: [PATCH 05/70] Integrated with the STK cache to cache the DEK key
material
---
.../hbase/keymeta/PBEKeymetaAdminClient.java | 5 +-
.../hadoop/hbase/security/EncryptionUtil.java | 54 ++++++++++++++++---
.../hadoop/hbase/io/crypto/Encryption.java | 40 +++++++++-----
.../hadoop/hbase/io/crypto/PBEKeyData.java | 15 +++---
.../io/crypto/PBEKeyStoreKeyProvider.java | 12 +++--
.../apache/hadoop/hbase/HBaseServerBase.java | 19 +++++--
.../hbase/MockRegionServerServices.java | 5 ++
.../java/org/apache/hadoop/hbase/Server.java | 13 +++++
.../hbase/keymeta/PBEClusterKeyAccessor.java | 54 ++++++++++++-------
.../hbase/keymeta/PBEClusterKeyCache.java | 29 ++++++----
.../hadoop/hbase/keymeta/PBEKeyAccessor.java | 7 ++-
.../hbase/keymeta/PBEKeymetaAdminImpl.java | 3 +-
.../keymeta/PBEKeymetaTableAccessor.java | 48 +++++++++++------
.../apache/hadoop/hbase/master/HMaster.java | 3 ++
.../hbase/master/PBEClusterKeyManager.java | 8 +--
.../hbase/regionserver/HRegionServer.java | 5 ++
.../regionserver/ReplicationSyncUp.java | 5 ++
.../hbase/master/MockNoopMasterServices.java | 5 ++
.../hadoop/hbase/master/MockRegionServer.java | 5 ++
.../hbase/master/TestActiveMasterManager.java | 15 ++++++
.../cleaner/TestReplicationHFileCleaner.java | 15 ++++++
...onProcedureStorePerformanceEvaluation.java | 15 ++++++
.../regionserver/TestHeapMemoryManager.java | 5 ++
.../token/TestTokenAuthentication.java | 5 ++
.../apache/hadoop/hbase/util/MockServer.java | 5 ++
hbase-shell/src/main/ruby/hbase/pbe_admin.rb | 11 ++--
.../main/ruby/shell/commands/pbe_enable.rb | 3 +-
27 files changed, 319 insertions(+), 90 deletions(-)
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java
index e150edc143dd..5ca264a147ee 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java
@@ -21,10 +21,11 @@ public PBEKeymetaAdminClient(Connection conn) throws IOException {
this.stub = PBEAdminProtos.PBEAdminService.newBlockingStub(conn.getAdmin().coprocessorService());
}
- @Override public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException {
+ @Override
+ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException {
try {
PBEAdminResponse pbeAdminResponse = stub.enablePBE(null,
- PBEAdminRequest.newBuilder().setPbePrefix(pbePrefix).build());
+ PBEAdminRequest.newBuilder().setPbePrefix(pbePrefix).setKeyNamespace(keyNamespace).build());
LOG.info("Got response: " + pbeAdminResponse);
return PBEKeyStatus.forValue((byte) pbeAdminResponse.getPbeStatus().getNumber());
} catch (ServiceException e) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
index 04fc5201cc10..4d0efba8122f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
@@ -80,6 +80,20 @@ public static byte[] wrapKey(Configuration conf, byte[] key, String algorithm)
* @return the encrypted key bytes
*/
public static byte[] wrapKey(Configuration conf, String subject, Key key) throws IOException {
+ return wrapKey(conf, subject, key, null);
+ }
+
+ /**
+ * Protect a key by encrypting it with the secret key of the given subject or kek. The
+ * configuration must be set up correctly for key alias resolution. Only one of the
+ * {@code subject} or {@code kek} needs to be specified and the other one can be {@code null}.
+ * @param conf configuration
+ * @param subject subject key alias
+ * @param key the key
+ * @param kek the key encryption key
+ * @return the encrypted key bytes
+ */
+ public static byte[] wrapKey(Configuration conf, String subject, Key key, Key kek) throws IOException {
// Wrap the key with the configured encryption algorithm.
String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
Cipher cipher = Encryption.getCipher(conf, algorithm);
@@ -100,8 +114,13 @@ public static byte[] wrapKey(Configuration conf, String subject, Key key) throws
builder
.setHash(UnsafeByteOperations.unsafeWrap(Encryption.computeCryptoKeyHash(conf, keyBytes)));
ByteArrayOutputStream out = new ByteArrayOutputStream();
- Encryption.encryptWithSubjectKey(out, new ByteArrayInputStream(keyBytes), subject, conf, cipher,
- iv);
+ if (kek != null) {
+ Encryption.encryptWithGivenKey(kek, out, new ByteArrayInputStream(keyBytes), cipher, iv);
+ }
+ else {
+ Encryption.encryptWithSubjectKey(out, new ByteArrayInputStream(keyBytes), subject, conf,
+ cipher, iv);
+ }
builder.setData(UnsafeByteOperations.unsafeWrap(out.toByteArray()));
// Build and return the protobuf message
out.reset();
@@ -118,6 +137,21 @@ public static byte[] wrapKey(Configuration conf, String subject, Key key) throws
* @return the raw key bytes
*/
public static Key unwrapKey(Configuration conf, String subject, byte[] value)
+ throws IOException, KeyException {
+ return unwrapKey(conf, subject, value, null);
+ }
+
+ /**
+ * Unwrap a key by decrypting it with the secret key of the given subject. The configuration must
+ * be set up correctly for key alias resolution. Only one of the {@code subject} or {@code kek}
+ * needs to be specified and the other one can be {@code null}.
+ * @param conf configuration
+ * @param subject subject key alias
+ * @param value the encrypted key bytes
+ * @param kek the key encryption key
+ * @return the raw key bytes
+ */
+ public static Key unwrapKey(Configuration conf, String subject, byte[] value, Key kek)
throws IOException, KeyException {
EncryptionProtos.WrappedKey wrappedKey =
EncryptionProtos.WrappedKey.parser().parseDelimitedFrom(new ByteArrayInputStream(value));
@@ -126,11 +160,11 @@ public static Key unwrapKey(Configuration conf, String subject, byte[] value)
if (cipher == null) {
throw new RuntimeException("Cipher '" + algorithm + "' not available");
}
- return getUnwrapKey(conf, subject, wrappedKey, cipher);
+ return getUnwrapKey(conf, subject, wrappedKey, cipher, kek);
}
private static Key getUnwrapKey(Configuration conf, String subject,
- EncryptionProtos.WrappedKey wrappedKey, Cipher cipher) throws IOException, KeyException {
+ EncryptionProtos.WrappedKey wrappedKey, Cipher cipher, Key kek) throws IOException, KeyException {
String configuredHashAlgorithm = Encryption.getConfiguredHashAlgorithm(conf);
String wrappedHashAlgorithm = wrappedKey.getHashAlgorithm().trim();
if (!configuredHashAlgorithm.equalsIgnoreCase(wrappedHashAlgorithm)) {
@@ -143,8 +177,14 @@ private static Key getUnwrapKey(Configuration conf, String subject,
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
byte[] iv = wrappedKey.hasIv() ? wrappedKey.getIv().toByteArray() : null;
- Encryption.decryptWithSubjectKey(out, wrappedKey.getData().newInput(), wrappedKey.getLength(),
- subject, conf, cipher, iv);
+ if (kek != null) {
+ Encryption.decryptWithGivenKey(kek, out, wrappedKey.getData().newInput(), wrappedKey.getLength(),
+ cipher, iv);
+ }
+ else {
+ Encryption.decryptWithSubjectKey(out, wrappedKey.getData().newInput(), wrappedKey.getLength(),
+ subject, conf, cipher, iv);
+ }
byte[] keyBytes = out.toByteArray();
if (wrappedKey.hasHash()) {
if (
@@ -176,7 +216,7 @@ public static Key unwrapWALKey(Configuration conf, String subject, byte[] value)
if (cipher == null) {
throw new RuntimeException("Cipher '" + algorithm + "' not available");
}
- return getUnwrapKey(conf, subject, wrappedKey, cipher);
+ return getUnwrapKey(conf, subject, wrappedKey, cipher, null);
}
/**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
index 5dac1af2c27a..c0abe8ccc59f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
@@ -468,6 +468,19 @@ public static void encryptWithSubjectKey(OutputStream out, InputStream in, Strin
if (key == null) {
throw new IOException("No key found for subject '" + subject + "'");
}
+ encryptWithGivenKey(key, out, in, cipher, iv);
+ }
+
+ /**
+ * Encrypts a block of plaintext with the specified symmetric key.
+ * @param key The symmetric key
+ * @param out ciphertext
+ * @param in plaintext
+ * @param cipher the encryption algorithm
+ * @param iv the initialization vector, can be null
+ */
+ public static void encryptWithGivenKey(Key key, OutputStream out, InputStream in,
+ Cipher cipher, byte[] iv) throws IOException {
Encryptor e = cipher.getEncryptor();
e.setKey(key);
e.setIv(iv); // can be null
@@ -490,19 +503,16 @@ public static void decryptWithSubjectKey(OutputStream out, InputStream in, int o
if (key == null) {
throw new IOException("No key found for subject '" + subject + "'");
}
- Decryptor d = cipher.getDecryptor();
- d.setKey(key);
- d.setIv(iv); // can be null
try {
- decrypt(out, in, outLen, d);
+ decryptWithGivenKey(key, out, in, outLen, cipher, iv);
} catch (IOException e) {
// If the current cipher algorithm fails to unwrap, try the alternate cipher algorithm, if one
// is configured
String alternateAlgorithm = conf.get(HConstants.CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY);
if (alternateAlgorithm != null) {
if (LOG.isDebugEnabled()) {
- LOG.debug("Unable to decrypt data with current cipher algorithm '"
- + conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES)
+ LOG.debug("Unable to decrypt data with current cipher algorithm '" + conf.get(
+ HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES)
+ "'. Trying with the alternate cipher algorithm '" + alternateAlgorithm
+ "' configured.");
}
@@ -510,16 +520,22 @@ public static void decryptWithSubjectKey(OutputStream out, InputStream in, int o
if (alterCipher == null) {
throw new RuntimeException("Cipher '" + alternateAlgorithm + "' not available");
}
- d = alterCipher.getDecryptor();
- d.setKey(key);
- d.setIv(iv); // can be null
- decrypt(out, in, outLen, d);
- } else {
- throw new IOException(e);
+ decryptWithGivenKey(key, out, in, outLen, alterCipher, iv);
+ }
+ else {
+ throw e;
}
}
}
+ public static void decryptWithGivenKey(Key key, OutputStream out, InputStream in, int outLen,
+ Cipher cipher, byte[] iv) throws IOException {
+ Decryptor d = cipher.getDecryptor();
+ d.setKey(key);
+ d.setIv(iv); // can be null
+ decrypt(out, in, outLen, d);
+ }
+
private static ClassLoader getClassLoaderForClass(Class> c) {
ClassLoader cl = Thread.currentThread().getContextClassLoader();
if (cl == null) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java
index 13cb51d72041..865966ee46cb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java
@@ -166,14 +166,17 @@ public long getRefreshTimestamp() {
*/
public long getKeyChecksum() {
if (keyChecksum == 0) {
- DataChecksum dataChecksum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 16);
- byte[] data = theKey.getEncoded();
- dataChecksum.update(data, 0, data.length);
- keyChecksum = dataChecksum.getValue();
+ keyChecksum = constructKeyChecksum(theKey.getEncoded());
}
return keyChecksum;
}
+ public static long constructKeyChecksum(byte[] data) {
+ DataChecksum dataChecksum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 16);
+ dataChecksum.update(data, 0, data.length);
+ return dataChecksum.getValue();
+ }
+
/**
* Computes the hash of the key metadata. If the hash has already been computed, this method
* returns the previously computed value. The hash is computed using the MD5 algorithm.
@@ -182,12 +185,12 @@ public long getKeyChecksum() {
*/
public byte[] getKeyMetadataHash() {
if (keyMetadataHash == null) {
- keyMetadataHash = makeMetadataHash(keyMetadata);
+ keyMetadataHash = constructMetadataHash(keyMetadata);
}
return keyMetadataHash;
}
- public static byte[] makeMetadataHash(String metadata) {
+ public static byte[] constructMetadataHash(String metadata) {
MessageDigest md5;
try {
md5 = MessageDigest.getInstance("MD5");
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java
index f801bc9d7836..4c958a30b01d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java
@@ -17,11 +17,13 @@ public class PBEKeyStoreKeyProvider extends KeyStoreKeyProvider implements PBEKe
private Configuration conf;
- @Override public void initConfig(Configuration conf) {
+ @Override
+ public void initConfig(Configuration conf) {
this.conf = conf;
}
- @Override public PBEKeyData getClusterKey(byte[] clusterId) {
+ @Override
+ public PBEKeyData getClusterKey(byte[] clusterId) {
checkConfig();
String masterKeyAlias = conf.get(HConstants.CRYPTO_PBE_MASTERKEY_NAME_CONF_KEY, null);
if (masterKeyAlias == null) {
@@ -38,7 +40,8 @@ public class PBEKeyStoreKeyProvider extends KeyStoreKeyProvider implements PBEKe
keyMetadata);
}
- @Override public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOException {
+ @Override
+ public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOException {
checkConfig();
String encodedPrefix = Base64.getEncoder().encodeToString(pbe_prefix);
String aliasConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + "." +
@@ -47,7 +50,8 @@ public class PBEKeyStoreKeyProvider extends KeyStoreKeyProvider implements PBEKe
return unwrapKey(keyMetadata);
}
- @Override public PBEKeyData unwrapKey(String keyMetadataStr) throws IOException {
+ @Override
+ public PBEKeyData unwrapKey(String keyMetadataStr) throws IOException {
Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyMetadataStr,
HashMap.class);
String encodedPrefix = keyMetadata.get(KEY_METADATA_PREFIX);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java
index 30bcd40a7a0a..ddf0fe578cff 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java
@@ -52,6 +52,8 @@
import org.apache.hadoop.hbase.http.InfoServer;
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminImpl;
@@ -190,8 +192,9 @@ public abstract class HBaseServerBase> extends
protected final NettyEventLoopGroupConfig eventLoopGroupConfig;
- private PBEKeymetaAdminImpl pbeKeymetaAdmin;
- private PBEKeyAccessor pbeKeyAccessor;
+ private PBEClusterKeyCache pbeClusterKeyCache;
+ protected PBEKeymetaAdminImpl pbeKeymetaAdmin;
+ protected PBEKeyAccessor pbeKeyAccessor;
private void setupSignalHandlers() {
if (!SystemUtils.IS_OS_WINDOWS) {
@@ -290,7 +293,6 @@ public HBaseServerBase(Configuration conf, String name) throws IOException {
initializeFileSystem();
pbeKeymetaAdmin = new PBEKeymetaAdminImpl(this);
- pbeKeyAccessor = new PBEKeyAccessor(pbeKeymetaAdmin);
int choreServiceInitialSize =
conf.getInt(CHORE_SERVICE_INITIAL_POOL_SIZE, DEFAULT_CHORE_SERVICE_INITIAL_POOL_SIZE);
@@ -422,6 +424,17 @@ public PBEKeyAccessor getPBEKeyAccessor() {
return pbeKeyAccessor;
}
+ @Override
+ public PBEClusterKeyCache getPBEClusterKeyCache() {
+ return pbeClusterKeyCache;
+ }
+
+ protected void buildPBEClusterKeyCache() throws IOException {
+ if (pbeClusterKeyCache == null) {
+ pbeClusterKeyCache = PBEClusterKeyCache.createCache(new PBEClusterKeyAccessor(this));
+ }
+ }
+
protected final void shutdownChore(ScheduledChore chore) {
if (chore != null) {
chore.shutdown();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java
index d0820464ad5d..c14a28cd8f9d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java
@@ -38,6 +38,7 @@
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.mob.MobFileCache;
@@ -258,6 +259,10 @@ public ChoreService getChoreService() {
return null;
}
+ @Override public PBEClusterKeyCache getPBEClusterKeyCache() {
+ return null;
+ }
+
@Override public PBEKeyAccessor getPBEKeyAccessor() {
return null;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java
index 57ac18bcd0a6..dcca89e8b2fb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java
@@ -23,6 +23,7 @@
import org.apache.hadoop.hbase.client.AsyncClusterConnection;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
@@ -85,7 +86,19 @@ default AsyncConnection getAsyncConnection() {
/** Returns The {@link ChoreService} instance for this server */
ChoreService getChoreService();
+ /**
+ * @return the cache for cluster keys.
+ */
+ public PBEClusterKeyCache getPBEClusterKeyCache();
+
+ /**
+ * @return the accessor for cluster keys.
+ */
public PBEKeyAccessor getPBEKeyAccessor();
+
+ /**
+ * @return the admin for keymeta.
+ */
public PBEKeymetaAdmin getPBEKeymetaAdmin();
/** Returns Return the FileSystem object used (can return null!). */
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java
index 1e3ee5bbde5e..ef62e92d93c6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java
@@ -28,7 +28,10 @@
import org.apache.yetus.audience.InterfaceAudience;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Comparator;
import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
import static org.apache.hadoop.hbase.HConstants.CLUSTER_KEY_FILE_PREFIX;
@InterfaceAudience.Private
@@ -44,24 +47,36 @@ public Path getLatestClusterKeyFile() throws IOException {
if (! isPBEEnabled()) {
return null;
}
- int currentMaxSeqNum = findLatestKeySequence(getAllClusterKeys());
+ List allClusterKeyFiles = getAllClusterKeyFiles();
+ if (allClusterKeyFiles.isEmpty()) {
+ throw new RuntimeException("No cluster key initialized yet");
+ }
+ int currentMaxSeqNum = extractKeySequence(allClusterKeyFiles.get(0));
return new Path(clusterKeyDir, CLUSTER_KEY_FILE_PREFIX + currentMaxSeqNum);
}
- public List getAllClusterKeys() throws IOException {
- if (! isPBEEnabled()) {
+ /**
+ * Returns all available cluster key files, ordered from latest to oldest.
+ * If no cluster key files are available, returns an empty list. If PBE is not enabled,
+ * returns null.
+ *
+ * @return a list of all available cluster key files
+ * @throws IOException
+ */
+ public List getAllClusterKeyFiles() throws IOException {
+ if (!isPBEEnabled()) {
return null;
}
FileSystem fs = server.getFileSystem();
- List clusterKeys = new ArrayList<>();
- for (FileStatus st: fs.globStatus(new Path(clusterKeyDir, CLUSTER_KEY_FILE_PREFIX + "*"))) {
+ Map clusterKeys = new TreeMap<>(Comparator.reverseOrder());
+ for (FileStatus st : fs.globStatus(new Path(clusterKeyDir, CLUSTER_KEY_FILE_PREFIX + "*"))) {
Path keyPath = st.getPath();
- extractClusterKeySeqNum(keyPath); // Just check for validity.
- clusterKeys.add(keyPath);
+ int seqNum = extractClusterKeySeqNum(keyPath);
+ clusterKeys.put(seqNum, keyPath);
}
- return clusterKeys;
- }
+ return new ArrayList<>(clusterKeys.values());
+ }
public PBEKeyData loadClusterKey(Path keyPath) throws IOException {
PBEKeyProvider provider = getKeyProvider();
return provider.unwrapKey(loadKeyMetadata(keyPath));
@@ -79,17 +94,18 @@ public int extractClusterKeySeqNum(Path keyPath) throws IOException {
throw new IOException("Couldn't parse key file name: " + keyPath.getName());
}
- protected int findLatestKeySequence(List clusterKeys) throws IOException {
- int maxKeySeq = -1;
- for (Path keyPath: clusterKeys) {
- if (keyPath.getName().startsWith(CLUSTER_KEY_FILE_PREFIX)) {
- int keySeq = Integer.valueOf(keyPath.getName().substring(CLUSTER_KEY_FILE_PREFIX.length()));
- if (keySeq > maxKeySeq) {
- maxKeySeq = keySeq;
- }
- }
+ /**
+ * Extract the key sequence number from the cluster key file name.
+ * @param clusterKeyFile
+ * @return The sequence or {@code -1} if not a valid sequence file.
+ * @throws IOException
+ */
+ protected int extractKeySequence(Path clusterKeyFile) throws IOException {
+ int keySeq = -1;
+ if (clusterKeyFile.getName().startsWith(CLUSTER_KEY_FILE_PREFIX)) {
+ keySeq = Integer.valueOf(clusterKeyFile.getName().substring(CLUSTER_KEY_FILE_PREFIX.length()));
}
- return maxKeySeq;
+ return keySeq;
}
protected String loadKeyMetadata(Path keyPath) throws IOException {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java
index 9ac76164d1e3..a77583ecf35b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyCache.java
@@ -23,32 +23,39 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
-import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.TreeMap;
@InterfaceAudience.Private
public class PBEClusterKeyCache {
private static final Logger LOG = LoggerFactory.getLogger(PBEClusterKeyCache.class);
- private PBEKeyData latestClusterKey;
- private Map clusterKeys = new HashMap<>();
+ private final PBEKeyData latestClusterKey;
+ private final Map clusterKeys;
- public PBEClusterKeyCache createCache(PBEClusterKeyAccessor accessor) throws IOException {
- List allClusterKeys = accessor.getAllClusterKeys();
- int latestKeySequence = accessor.findLatestKeySequence(allClusterKeys);
+ /**
+ * Construct the Cluster Key cache from the specified accessor.
+ * @param accessor
+ * @return the cache or {@code null} if no keys are found.
+ * @throws IOException
+ */
+ public static PBEClusterKeyCache createCache(PBEClusterKeyAccessor accessor) throws IOException {
+ List allClusterKeyFiles = accessor.getAllClusterKeyFiles();
+ if (allClusterKeyFiles.isEmpty()) {
+ LOG.warn("No cluster key files found, skipping cache creation");
+ return null;
+ }
PBEKeyData latestClusterKey = null;
- for (Path keyPath: allClusterKeys) {
+ Map clusterKeys = new TreeMap<>();
+ for (Path keyPath: allClusterKeyFiles) {
LOG.info("Loading cluster key from: {}", keyPath);
PBEKeyData keyData = accessor.loadClusterKey(keyPath);
- if (accessor.extractClusterKeySeqNum(keyPath) == latestKeySequence) {
+ if (latestClusterKey == null) {
latestClusterKey = keyData;
}
clusterKeys.put(keyData.getKeyChecksum(), keyData);
}
- if (latestClusterKey == null) {
- throw new RuntimeException("Expected to find a key for sequence: " + latestKeySequence);
- }
return new PBEClusterKeyCache(clusterKeys, latestClusterKey);
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java
index 0aefeddeca9b..5d79875a2c46 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java
@@ -20,6 +20,7 @@
import org.apache.hadoop.hbase.io.crypto.PBEKeyData;
import org.apache.yetus.audience.InterfaceAudience;
import java.io.IOException;
+import java.security.KeyException;
import java.util.List;
/**
@@ -46,7 +47,8 @@ public PBEKeyAccessor(PBEKeymetaTableAccessor keymetaAccessor) {
* @return The key data
* @throws IOException if an error occurs while retrieving the key
*/
- public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata) throws IOException {
+ public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata)
+ throws IOException, KeyException {
PBEKeyData keyData = keyDataCache.getEntry(keyMetadata);
if (keyData == null) {
keyData = keymetaAccessor.getKey(pbePrefix, keyNamespace, keyMetadata);
@@ -63,7 +65,8 @@ public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetada
* @return The key data
* @throws IOException if an error occurs while retrieving the key
*/
- public PBEKeyData getAnActiveKey(byte[] pbePrefix, String keyNamespace) throws IOException {
+ public PBEKeyData getAnActiveKey(byte[] pbePrefix, String keyNamespace)
+ throws IOException, KeyException {
PBEKeyData keyData = keyDataCache.getRandomEntryForPrefix(pbePrefix, keyNamespace);
if (keyData == null) {
List activeKeys = keymetaAccessor.getActiveKeys(pbePrefix, keyNamespace);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java
index f4a4a6b87b1e..c005d5643b20 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java
@@ -50,7 +50,8 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx
}
PBEKeyProvider provider = getKeyProvider();
PBEKeyData pbeKey = provider.getPBEKey(pbe_prefix, keyNamespace);
- LOG.info("Got key data with status: {} for prefix: {}", pbeKey.getKeyStatus(), pbePrefix);
+ LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", pbeKey.getKeyStatus(),
+ pbeKey.getKeyMetadata(), pbePrefix);
addKey(pbeKey);
return pbeKey.getKeyStatus();
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
index f42370c9fc48..7704f7384d12 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
@@ -32,10 +32,13 @@
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.io.crypto.PBEKeyData;
import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus;
+import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
+import javax.crypto.spec.SecretKeySpec;
import java.io.IOException;
+import java.security.Key;
+import java.security.KeyException;
import java.util.ArrayList;
import java.util.List;
@@ -83,7 +86,8 @@ public void addKey(PBEKeyData keyData) throws IOException {
}
}
- public List getActiveKeys(byte[] pbePrefix, String keyNamespace) throws IOException {
+ public List getActiveKeys(byte[] pbePrefix, String keyNamespace)
+ throws IOException, KeyException {
Connection connection = server.getConnection();
byte[] prefixForScan = Bytes.add(Bytes.toBytes(pbePrefix.length), pbePrefix,
Bytes.toBytes(keyNamespace));
@@ -107,26 +111,31 @@ public List getActiveKeys(byte[] pbePrefix, String keyNamespace) thr
}
public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata)
- throws IOException {
+ throws IOException, KeyException {
Connection connection = server.getConnection();
try (Table table = connection.getTable(KEY_META_TABLE_NAME)) {
byte[] rowKey = constructRowKeyForMetadata(pbePrefix, keyNamespace,
- PBEKeyData.makeMetadataHash(keyMetadata));
+ PBEKeyData.constructMetadataHash(keyMetadata));
Result result = table.get(new Get(rowKey));
return parseFromResult(pbePrefix, keyNamespace, result);
}
}
- private Put addMutationColumns(Put put, PBEKeyData keyData) {
+ private Put addMutationColumns(Put put, PBEKeyData keyData) throws IOException {
+ PBEKeyData latestClusterKey = server.getPBEClusterKeyCache().getLatestClusterKey();
if (keyData.getTheKey() != null) {
+ byte[] dekWrappedBySTK = EncryptionUtil.wrapKey(server.getConfiguration(), null,
+ keyData.getTheKey(), latestClusterKey.getTheKey());
put.addColumn(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES,
- Bytes.toBytes(keyData.getKeyChecksum()));
+ Bytes.toBytes(keyData.getKeyChecksum()))
+ .addColumn(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES, dekWrappedBySTK)
+ ;
}
return put.setDurability(Durability.SKIP_WAL)
.setPriority(HConstants.SYSTEMTABLE_QOS)
.addColumn(KEY_META_INFO_FAMILY, DEK_METADATA_QUAL_BYTES, keyData.getKeyMetadata().getBytes())
- //.addColumn(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES, null)
- //.addColumn(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES, null)
+ .addColumn(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES,
+ Bytes.toBytes(latestClusterKey.getKeyChecksum()))
.addColumn(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES,
Bytes.toBytes(keyData.getRefreshTimestamp()))
.addColumn(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES,
@@ -146,7 +155,8 @@ private static byte[] constructRowKeyForMetadata(byte[] pbePrefix, String keyNam
keyMetadataHash);
}
- private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result result) {
+ private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result result)
+ throws IOException, KeyException {
if (result == null || result.isEmpty()) {
return null;
}
@@ -154,12 +164,20 @@ private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result
result.getValue(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES)[0]);
String dekMetadata = Bytes.toString(result.getValue(KEY_META_INFO_FAMILY,
DEK_METADATA_QUAL_BYTES));
- long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, REFRESHED_TIMESTAMP_QUAL_BYTES));
+ long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY,
+ REFRESHED_TIMESTAMP_QUAL_BYTES));
byte[] dekChecksum = result.getValue(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES);
- //byte[] dekWrappedByStk = result.getValue(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES);
- //byte[] stkChecksum = result.getValue(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES);
-
- return new PBEKeyData(pbePrefix, keyNamespace, null, keyStatus, dekMetadata,
- refreshedTimestamp);
+ byte[] dekWrappedByStk = result.getValue(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES);
+ long stkChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES));
+
+ PBEKeyData clusterKey = server.getPBEClusterKeyCache().getClusterKeyByChecksum(stkChecksum);
+ Key dek = EncryptionUtil.unwrapKey(server.getConfiguration(), null, dekWrappedByStk,
+ clusterKey.getTheKey());
+ PBEKeyData dekKeyData =
+ new PBEKeyData(pbePrefix, keyNamespace, dek, keyStatus, dekMetadata, refreshedTimestamp);
+ if (!Bytes.equals(dekKeyData.getKeyMetadataHash(), dekChecksum)) {
+ throw new RuntimeException("Key hash didn't match for key with metadata: " + dekMetadata);
+ }
+ return dekKeyData;
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 6eac904d6e4c..6a93fd2ab667 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -122,6 +122,8 @@
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
import org.apache.hadoop.hbase.keymeta.PBEKeymetaMasterService;
import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
@@ -997,6 +999,7 @@ private void finishActiveMasterInitialization() throws IOException, InterruptedE
pbeClusterKeyManager = new PBEClusterKeyManager(this);
pbeClusterKeyManager.ensureClusterKeyInitialized();
+ buildPBEClusterKeyCache();
// Precaution. Put in place the old hbck1 lock file to fence out old hbase1s running their
// hbck1s against an hbase2 cluster; it could do damage. To skip this behavior, set
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java
index f3e0f60a8c61..3a8c821fd872 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/PBEClusterKeyManager.java
@@ -41,11 +41,11 @@ public void ensureClusterKeyInitialized() throws IOException {
if (! isPBEEnabled()) {
return;
}
- List clusterKeys = getAllClusterKeys();
+ List clusterKeys = getAllClusterKeyFiles();
if (clusterKeys.isEmpty()) {
LOG.info("Initializing Cluster Key for the first time");
// Double check for cluster key as another HMaster might have succeeded.
- if (rotateClusterKey(null) == null && getAllClusterKeys().isEmpty()) {
+ if (rotateClusterKey(null) == null && getAllClusterKeyFiles().isEmpty()) {
throw new RuntimeException("Failed to generate or save Cluster Key");
}
}
@@ -82,7 +82,9 @@ private PBEKeyData rotateClusterKey(String currentKeyMetadata) throws IOExceptio
}
private boolean saveLatestClusterKey(String keyMetadata) throws IOException {
- int nextClusterKeySeq = findLatestKeySequence(getAllClusterKeys()) + 1;
+ List allClusterKeyFiles = getAllClusterKeyFiles();
+ int nextClusterKeySeq = (allClusterKeyFiles.isEmpty() ? -1
+ : extractKeySequence(allClusterKeyFiles.get(0))) + 1;
LOG.info("Trying to save a new cluster key at seq: {}", nextClusterKeySeq);
MasterFileSystem masterFS = master.getMasterFileSystem();
Path nextClusterKeyPath = new Path(clusterKeyDir,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index d80f58edf366..eb5d9e31b8fa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -120,6 +120,8 @@
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminImpl;
@@ -1451,6 +1453,9 @@ protected void handleReportForDutyResponse(final RegionServerStartupResponse c)
initializeFileSystem();
}
+ buildPBEClusterKeyCache();
+ pbeKeyAccessor = new PBEKeyAccessor(pbeKeymetaAdmin);
+
// hack! Maps DFSClient => RegionServer for logs. HDFS made this
// config param for task trackers, but we can piggyback off of it.
if (this.conf.get("mapreduce.task.attempt.id") == null) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
index 1e5f9db2d3c3..a42c24e1a040 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSyncUp.java
@@ -41,6 +41,7 @@
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.AsyncClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.master.replication.OfflineTableReplicationQueueStorage;
@@ -368,6 +369,10 @@ public ChoreService getChoreService() {
return null;
}
+ @Override public PBEClusterKeyCache getPBEClusterKeyCache() {
+ return null;
+ }
+
@Override public PBEKeyAccessor getPBEKeyAccessor() {
return null;
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
index 8ce567d25f12..d9b384a1d98e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
@@ -38,6 +38,7 @@
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.executor.ExecutorService;
import org.apache.hadoop.hbase.favored.FavoredNodesManager;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
@@ -118,6 +119,10 @@ public ChoreService getChoreService() {
return null;
}
+ @Override public PBEClusterKeyCache getPBEClusterKeyCache() {
+ return null;
+ }
+
@Override public PBEKeyAccessor getPBEKeyAccessor() {
return null;
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
index 2b5f138a6a0f..0cb5ea436949 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
@@ -52,6 +52,7 @@
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.mob.MobFileCache;
@@ -558,6 +559,10 @@ public ChoreService getChoreService() {
return null;
}
+ @Override public PBEClusterKeyCache getPBEClusterKeyCache() {
+ return null;
+ }
+
@Override public PBEKeyAccessor getPBEKeyAccessor() {
return null;
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java
index fcb67ed31b47..f850ab0f52b3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java
@@ -33,6 +33,9 @@
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
+import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskGroup;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -327,5 +330,17 @@ public ClusterStatusTracker getClusterStatusTracker() {
public ActiveMasterManager getActiveMasterManager() {
return activeMasterManager;
}
+
+ @Override public PBEClusterKeyCache getPBEClusterKeyCache() {
+ return null;
+ }
+
+ @Override public PBEKeyAccessor getPBEKeyAccessor() {
+ return null;
+ }
+
+ @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() {
+ return null;
+ }
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
index da1bc04d7e03..69de03d223cd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java
@@ -38,6 +38,9 @@
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
+import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.replication.ReplicationException;
import org.apache.hadoop.hbase.replication.ReplicationFactory;
@@ -215,6 +218,18 @@ public Connection getConnection() {
}
}
+ @Override public PBEClusterKeyCache getPBEClusterKeyCache() {
+ return null;
+ }
+
+ @Override public PBEKeyAccessor getPBEKeyAccessor() {
+ return null;
+ }
+
+ @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() {
+ return null;
+ }
+
@Override
public FileSystem getFileSystem() {
try {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java
index 3c55696080e3..c4309acea7d4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStorePerformanceEvaluation.java
@@ -24,6 +24,9 @@
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
+import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
+import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.master.region.MasterRegion;
import org.apache.hadoop.hbase.master.region.MasterRegionFactory;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStorePerformanceEvaluation;
@@ -57,6 +60,18 @@ public Configuration getConfiguration() {
public ServerName getServerName() {
return serverName;
}
+
+ @Override public PBEClusterKeyCache getPBEClusterKeyCache() {
+ return null;
+ }
+
+ @Override public PBEKeyAccessor getPBEKeyAccessor() {
+ return null;
+ }
+
+ @Override public PBEKeymetaAdmin getPBEKeymetaAdmin() {
+ return null;
+ }
}
private MasterRegion region;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
index f151e20bf587..81c9cb075db5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
@@ -46,6 +46,7 @@
import org.apache.hadoop.hbase.io.hfile.CachedBlock;
import org.apache.hadoop.hbase.io.hfile.ResizableBlockCache;
import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerContext;
@@ -839,6 +840,10 @@ public ChoreService getChoreService() {
return null;
}
+ @Override public PBEClusterKeyCache getPBEClusterKeyCache() {
+ return null;
+ }
+
@Override public PBEKeyAccessor getPBEKeyAccessor() {
return null;
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
index 44d35ae442ed..8d8b2f177378 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
@@ -55,6 +55,7 @@
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.ipc.SimpleRpcServer;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.log.HBaseMarkers;
@@ -352,6 +353,10 @@ public ChoreService getChoreService() {
return null;
}
+ @Override public PBEClusterKeyCache getPBEClusterKeyCache() {
+ return null;
+ }
+
@Override public PBEKeyAccessor getPBEKeyAccessor() {
return null;
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java
index cbf6d0773d3e..3896a2a4348e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MockServer.java
@@ -26,6 +26,7 @@
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.AsyncClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
import org.apache.hadoop.hbase.keymeta.PBEKeyAccessor;
import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdmin;
import org.apache.hadoop.hbase.log.HBaseMarkers;
@@ -102,6 +103,10 @@ public ChoreService getChoreService() {
throw new UnsupportedOperationException();
}
+ @Override public PBEClusterKeyCache getPBEClusterKeyCache() {
+ return null;
+ }
+
@Override public PBEKeyAccessor getPBEKeyAccessor() {
return null;
}
diff --git a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb
index 4a9dfbd7f998..e00f7ca138ce 100644
--- a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb
@@ -16,12 +16,14 @@
#
include Java
+java_import org.apache.hadoop.hbase.io.crypto.PBEKeyData
+java_import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminClient
module Hbase
class PBEAdmin
def initialize(connection)
@connection = connection
- @admin = org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminClient.new(connection)
+ @admin = PBEKeymetaAdminClient.new(connection)
@hb_admin = @connection.getAdmin
end
@@ -31,9 +33,10 @@ def close
def pbe_enable(pbe_prefix)
prefixInfo = pbe_prefix.split(':')
- assert prefixInfo.length <= 2, 'Invalid prefix:namespace format'
- @admin.enablePBE(prefixInfo[0], prefixInfo.length > 1? prefixInfo[1] :
- org.apache.hadoop.hbase.io.crypto.PBEKeyData.KEY_NAMESPACE_GLOBAL)
+ raise(ArgumentError, 'Invalid prefix:namespace format') unless (prefixInfo.length == 1 ||
+ prefixInfo.length == 2)
+ @admin.enablePBE(prefixInfo[0], prefixInfo.length > 1 ? prefixInfo[1] :
+ PBEKeyData::KEY_NAMESPACE_GLOBAL)
end
end
end
diff --git a/hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb b/hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb
index e74fdc607083..ccaba7762470 100644
--- a/hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/pbe_enable.rb
@@ -20,7 +20,8 @@ module Commands
class PbeEnable < Command
def help
<<-EOF
-Enable PBE for a given prefix (in Base64 format)
+Enable PBE for a given prefix:namespace (prefix in Base64 format).
+If no namespace is specified, the global namespace (*) is used.
EOF
end
From 995640c0bac791624b1605cf68b3163b29463036 Mon Sep 17 00:00:00 2001
From: Hari Dara
Date: Tue, 11 Mar 2025 15:32:07 +0530
Subject: [PATCH 06/70] Added key listing to do end2end testing for key
decryption with STK
---
.../hbase/keymeta/PBEKeymetaAdminClient.java | 24 ++++++
.../hadoop/hbase/io/crypto/PBEKeyData.java | 54 ++++++++----
.../hadoop/hbase/keymeta/PBEKeymetaAdmin.java | 14 +++
.../io/crypto/TestPBEKeyStoreKeyProvider.java | 2 +-
.../src/main/protobuf/server/PBEAdmin.proto | 8 ++
.../hadoop/hbase/keymeta/PBEKeyAccessor.java | 7 +-
.../hadoop/hbase/keymeta/PBEKeyDataCache.java | 4 +-
.../hbase/keymeta/PBEKeymetaAdminImpl.java | 33 +++++--
.../keymeta/PBEKeymetaServiceEndpoint.java | 86 +++++++++++++++----
.../keymeta/PBEKeymetaTableAccessor.java | 50 ++++++++---
hbase-shell/src/main/ruby/hbase/pbe_admin.rb | 14 ++-
hbase-shell/src/main/ruby/shell.rb | 1 +
.../ruby/shell/commands/pbe_get_statuses.rb | 45 ++++++++++
13 files changed, 281 insertions(+), 61 deletions(-)
create mode 100644 hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java
index 5ca264a147ee..b39e7768e0b3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java
@@ -1,6 +1,7 @@
package org.apache.hadoop.hbase.keymeta;
import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.io.crypto.PBEKeyData;
import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus;
import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos;
import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminRequest;
@@ -11,6 +12,9 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
+import java.security.KeyException;
+import java.util.ArrayList;
+import java.util.List;
@InterfaceAudience.Public
public class PBEKeymetaAdminClient implements PBEKeymetaAdmin {
@@ -32,4 +36,24 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx
throw ProtobufUtil.handleRemoteException(e);
}
}
+
+ @Override public List getPBEKeyStatuses(String pbePrefix, String keyNamespace)
+ throws IOException, KeyException {
+ List keyStatuses = new ArrayList<>();
+ try {
+ PBEAdminProtos.PBEGetStatusResponse statusResponse = stub.getPBEStatuses(null,
+ PBEAdminRequest.newBuilder().setPbePrefix(pbePrefix).setKeyNamespace(keyNamespace).build());
+ for (PBEAdminResponse status: statusResponse.getStatusList()) {
+ keyStatuses.add(new PBEKeyData(
+ status.getPbePrefixBytes().toByteArray(),
+ status.getKeyNamespace(), null,
+ PBEKeyStatus.forValue((byte) status.getPbeStatus().getNumber()),
+ status.getKeyMetadata(),
+ status.getRefreshTimestamp()));
+ }
+ } catch (ServiceException e) {
+ throw ProtobufUtil.handleRemoteException(e);
+ }
+ return keyStatuses;
+ }
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java
index 865966ee46cb..34524dd6703f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java
@@ -26,6 +26,8 @@
import java.security.Key;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
+import java.util.Arrays;
+import java.util.Base64;
/**
* This class represents an encryption key data which includes the key itself, its status, metadata
@@ -51,8 +53,8 @@
public class PBEKeyData {
public static final String KEY_NAMESPACE_GLOBAL = "*";
- private byte[] pbe_prefix;
- private String key_namespace;
+ private byte[] pbePrefix;
+ private String keyNamespace;
private Key theKey;
private PBEKeyStatus keyStatus;
private String keyMetadata;
@@ -92,8 +94,8 @@ public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeySta
Preconditions.checkNotNull(keyStatus, "keyStatus should not be null");
Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null");
- this.pbe_prefix = pbe_prefix;
- this.key_namespace = key_namespace;
+ this.pbePrefix = pbe_prefix;
+ this.keyNamespace = key_namespace;
this.theKey = theKey;
this.keyStatus = keyStatus;
this.keyMetadata = keyMetadata;
@@ -105,26 +107,26 @@ public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeySta
*
* @return The PBE prefix as a byte array.
*/
- public byte[] getPbe_prefix() {
- return pbe_prefix;
+ public byte[] getPBEPrefix() {
+ return pbePrefix;
}
/**
- * Returns the namespace associated with the key.
- *
- * @return The namespace as a {@code String}.
+ * Return the PBE prefix in Base64 encoded form.
+ * @return the encoded PBE prefix.
*/
- public String getKeyNamespace() {
- return key_namespace;
+ public String getPBEPrefixEncoded() {
+ return Base64.getEncoder().encodeToString(pbePrefix);
}
+
/**
* Returns the namespace associated with the key.
*
* @return The namespace as a {@code String}.
*/
- public String getKey_namespace() {
- return key_namespace;
+ public String getKeyNamespace() {
+ return keyNamespace;
}
/**
@@ -154,6 +156,12 @@ public String getKeyMetadata() {
return keyMetadata;
}
+ @Override public String toString() {
+ return "PBEKeyData{" + "pbePrefix=" + Arrays.toString(pbePrefix) + ", keyNamespace='"
+ + keyNamespace + '\'' + ", keyStatus=" + keyStatus + ", keyMetadata='" + keyMetadata + '\''
+ + ", refreshTimestamp=" + refreshTimestamp + '}';
+ }
+
public long getRefreshTimestamp() {
return refreshTimestamp;
}
@@ -190,6 +198,18 @@ public byte[] getKeyMetadataHash() {
return keyMetadataHash;
}
+ /**
+ * Return the hash of key metadata in Base64 encoded form.
+ * @return the encoded hash or {@code null} if no metadata is available.
+ */
+ public String getKeyMetadataHashEncoded() {
+ byte[] hash = getKeyMetadataHash();
+ if (hash != null) {
+ return Base64.getEncoder().encodeToString(hash);
+ }
+ return null;
+ }
+
public static byte[] constructMetadataHash(String metadata) {
MessageDigest md5;
try {
@@ -209,8 +229,8 @@ public boolean equals(Object o) {
PBEKeyData that = (PBEKeyData) o;
return new EqualsBuilder()
- .append(pbe_prefix, that.pbe_prefix)
- .append(key_namespace, that.key_namespace)
+ .append(pbePrefix, that.pbePrefix)
+ .append(keyNamespace, that.keyNamespace)
.append(theKey, that.theKey)
.append(keyStatus, that.keyStatus)
.append(keyMetadata, that.keyMetadata)
@@ -220,8 +240,8 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
- .append(pbe_prefix)
- .append(key_namespace)
+ .append(pbePrefix)
+ .append(keyNamespace)
.append(theKey)
.append(keyStatus)
.append(keyMetadata)
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java
index ae0f1af9d1ba..5c2254484bab 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdmin.java
@@ -17,9 +17,12 @@
*/
package org.apache.hadoop.hbase.keymeta;
+import org.apache.hadoop.hbase.io.crypto.PBEKeyData;
import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus;
import org.apache.yetus.audience.InterfaceAudience;
import java.io.IOException;
+import java.security.KeyException;
+import java.util.List;
/**
* PBEKeymetaAdmin is an interface for administrative functions related to PBE keys.
@@ -41,4 +44,15 @@ public interface PBEKeymetaAdmin {
* @throws IOException if an error occurs while enabling PBE.
*/
PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException;
+
+ /**
+ * Get the status of all the keys for the specified pbe_prefix.
+ *
+ * @param pbePrefix The prefix for the PBE key.
+ * @param keyNamespace The namespace for the PBE key.
+ * @return The list of status objects each identifying the key and its current status.
+ * @throws IOException if an error occurs while retrieving the key statuses.
+ */
+ List getPBEKeyStatuses(String pbePrefix, String keyNamespace)
+ throws IOException, KeyException;
}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java
index e511627d35dc..6cc1bbbdfa22 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestPBEKeyStoreKeyProvider.java
@@ -138,7 +138,7 @@ private void assertPBEKeyData(PBEKeyData keyData, PBEKeyStatus expKeyStatus, byt
Map keyMetadata = GsonUtil.getDefaultInstance().fromJson(keyData.getKeyMetadata(),
HashMap.class);
assertNotNull(keyMetadata);
- assertEquals(new Bytes(prefixBytes), keyData.getPbe_prefix());
+ assertEquals(new Bytes(prefixBytes), keyData.getPBEPrefix());
assertEquals(alias, keyMetadata.get(KEY_METADATA_ALIAS));
assertEquals(Base64.getEncoder().encodeToString(prefixBytes),
keyMetadata.get(KEY_METADATA_PREFIX));
diff --git a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto
index 75dfffac427c..4f0c4422b4c8 100644
--- a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto
@@ -40,9 +40,17 @@ message PBEAdminResponse {
required string pbe_prefix = 1;
required string key_namespace = 2;
required PBEKeyStatus pbe_status = 3;
+ optional string key_metadata = 4;
+ optional int64 refresh_timestamp = 5;
+}
+
+message PBEGetStatusResponse {
+ repeated PBEAdminResponse status = 1;
}
service PBEAdminService {
rpc EnablePBE(PBEAdminRequest)
returns (PBEAdminResponse);
+ rpc GetPBEStatuses(PBEAdminRequest)
+ returns (PBEGetStatusResponse);
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java
index 5d79875a2c46..0714e0b5b362 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java
@@ -23,6 +23,7 @@
import java.security.KeyException;
import java.util.List;
+// TODO: Also integrate with the key provider when it is not found in the cache???
/**
* This class provides a unified access on top of both {@code PBEKeyDataCache} (L1) and
* {@code PBEKeymetaTableAccessor} (L2) to access PBE keys. When the getter is called, it first
@@ -44,7 +45,7 @@ public PBEKeyAccessor(PBEKeymetaTableAccessor keymetaAccessor) {
* @param pbePrefix The prefix of the key
* @param keyNamespace The namespace of the key
* @param keyMetadata The metadata of the key
- * @return The key data
+ * @return The key data or {@code null}
* @throws IOException if an error occurs while retrieving the key
*/
public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata)
@@ -52,7 +53,9 @@ public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetada
PBEKeyData keyData = keyDataCache.getEntry(keyMetadata);
if (keyData == null) {
keyData = keymetaAccessor.getKey(pbePrefix, keyNamespace, keyMetadata);
- keyDataCache.addEntry(keyData);
+ if (keyData != null) {
+ keyDataCache.addEntry(keyData);
+ }
}
return keyData;
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java
index b022177d83d6..51bba86a3c4b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyDataCache.java
@@ -51,7 +51,7 @@ public PBEKeyDataCache() {
public void addEntry(PBEKeyData pbeKeyData) {
lock.lock();
try {
- Bytes pbePrefix = new Bytes(pbeKeyData.getPbe_prefix());
+ Bytes pbePrefix = new Bytes(pbeKeyData.getPBEPrefix());
String keyNamespace = pbeKeyData.getKeyNamespace();
cache.put(pbeKeyData.getKeyMetadata(), pbeKeyData);
@@ -100,7 +100,7 @@ public PBEKeyData removeEntry(String keyMetadata) {
try {
PBEKeyData removedEntry = cache.remove(keyMetadata);
if (removedEntry != null) {
- Bytes pbePrefix = new Bytes(removedEntry.getPbe_prefix());
+ Bytes pbePrefix = new Bytes(removedEntry.getPBEPrefix());
String keyNamespace = removedEntry.getKeyNamespace();
Map> nsCache = prefixCache.get(keyNamespace);
List keyList = nsCache != null ? nsCache.get(pbePrefix) : null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java
index c005d5643b20..c5cf0615140b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java
@@ -25,7 +25,9 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
+import java.security.KeyException;
import java.util.Base64;
+import java.util.List;
@InterfaceAudience.Private
public class PBEKeymetaAdminImpl extends PBEKeymetaTableAccessor implements PBEKeymetaAdmin {
@@ -40,7 +42,17 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx
if (! isPBEEnabled()) {
throw new IOException("PBE is currently not enabled in HBase configuration");
}
- LOG.info("Trying to enable PBE on key: {} for namespace: {}", pbePrefix, keyNamespace);
+ LOG.info("Trying to enable PBE on key: {} under namespace: {}", pbePrefix, keyNamespace);
+ byte[] pbe_prefix = convertToPrefixBytes(pbePrefix);
+ PBEKeyProvider provider = getKeyProvider();
+ PBEKeyData pbeKey = provider.getPBEKey(pbe_prefix, keyNamespace);
+ LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", pbeKey.getKeyStatus(),
+ pbeKey.getKeyMetadata(), pbePrefix);
+ addKey(pbeKey);
+ return pbeKey.getKeyStatus();
+ }
+
+ private static byte[] convertToPrefixBytes(String pbePrefix) throws IOException {
byte[] pbe_prefix;
try {
pbe_prefix = Base64.getDecoder().decode(pbePrefix);
@@ -48,11 +60,18 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx
catch (IllegalArgumentException e) {
throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e);
}
- PBEKeyProvider provider = getKeyProvider();
- PBEKeyData pbeKey = provider.getPBEKey(pbe_prefix, keyNamespace);
- LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", pbeKey.getKeyStatus(),
- pbeKey.getKeyMetadata(), pbePrefix);
- addKey(pbeKey);
- return pbeKey.getKeyStatus();
+ return pbe_prefix;
+ }
+
+ @Override
+ public List getPBEKeyStatuses(String pbePrefix, String keyNamespace)
+ throws IOException, KeyException {
+ if (! isPBEEnabled()) {
+ throw new IOException("PBE is currently not enabled in HBase configuration");
+ }
+ LOG.info("Getting key statuses for PBE on key: {} under namespace: {}", pbePrefix,
+ keyNamespace);
+ byte[] pbe_prefix = convertToPrefixBytes(pbePrefix);
+ return super.getAllKeys(pbe_prefix, keyNamespace);
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java
index 02c56d95dd28..e32ecd2c7cc5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java
@@ -21,13 +21,16 @@
import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
import org.apache.hadoop.hbase.coprocessor.HasMasterServices;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
+import org.apache.hadoop.hbase.io.crypto.PBEKeyData;
import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos;
import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminRequest;
import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminResponse;
+import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEGetStatusResponse;
import org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminService;
+import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
import org.apache.hbase.thirdparty.com.google.protobuf.Service;
@@ -35,8 +38,10 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
+import java.security.KeyException;
import java.util.Base64;
import java.util.Collections;
+import java.util.List;
/**
* This class implements a coprocessor service endpoint for the Phoenix Query Server's
@@ -77,7 +82,7 @@ public void start(CoprocessorEnvironment env) throws IOException {
/**
* Returns an iterable of the available coprocessor services, which includes the
- * {@link org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminService} implemented by
+ * {@link PBEAdminService} implemented by
* {@link org.apache.hadoop.hbase.keymeta.PBEKeymetaServiceEndpoint.KeyMetaAdminServiceImpl}.
*
* @return An iterable of the available coprocessor services.
@@ -88,11 +93,10 @@ public Iterable getServices() {
}
/**
- * The implementation of the {@link org.apache.hadoop.hbase.protobuf.generated.PBEAdminProtos.PBEAdminService}
+ * The implementation of the {@link PBEAdminProtos.PBEAdminService}
* interface, which provides the actual method implementations for enabling PBE.
*/
private class KeyMetaAdminServiceImpl extends PBEAdminService {
-
/**
* Enables PBE for a given tenant and namespace, as specified in the provided request.
*
@@ -102,19 +106,9 @@ private class KeyMetaAdminServiceImpl extends PBEAdminService {
*/
@Override
public void enablePBE(RpcController controller, PBEAdminRequest request,
- RpcCallback done) {
- PBEAdminResponse.Builder builder =
- PBEAdminResponse.newBuilder().setPbePrefix(request.getPbePrefix())
- .setKeyNamespace(request.getKeyNamespace());
- byte[] pbe_prefix = null;
- try {
- pbe_prefix = Base64.getDecoder().decode(request.getPbePrefix());
- } catch (IllegalArgumentException e) {
- builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED);
- CoprocessorRpcUtils.setControllerException(controller, new IOException(
- "Failed to decode specified prefix as Base64 string: " + request.getPbePrefix(), e));
- }
- if (pbe_prefix != null) {
+ RpcCallback done) {
+ PBEAdminResponse.Builder builder = getResponseBuilder(controller, request);
+ if (builder.getPbePrefix() != null) {
try {
PBEKeyStatus pbeKeyStatus = master.getPBEKeymetaAdmin()
.enablePBE(request.getPbePrefix(), request.getKeyNamespace());
@@ -126,5 +120,65 @@ public void enablePBE(RpcController controller, PBEAdminRequest request,
}
done.run(builder.build());
}
+
+ @Override
+ public void getPBEStatuses(RpcController controller, PBEAdminRequest request,
+ RpcCallback done) {
+ PBEGetStatusResponse.Builder responseBuilder =
+ PBEGetStatusResponse.newBuilder();
+ PBEAdminResponse.Builder builder = getResponseBuilder(controller, request);
+ if (builder.getPbePrefix() != null) {
+ try {
+ List pbeKeyStatuses = master.getPBEKeymetaAdmin()
+ .getPBEKeyStatuses(request.getPbePrefix(), request.getKeyNamespace());
+ for (PBEKeyData keyData: pbeKeyStatuses) {
+ builder.setPbeStatus(
+ PBEAdminProtos.PBEKeyStatus.valueOf(keyData.getKeyStatus().getVal()));
+ builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.valueOf(
+ keyData.getKeyStatus().getVal()))
+ .setKeyMetadata(keyData.getKeyMetadata())
+ .setRefreshTimestamp(keyData.getRefreshTimestamp())
+ ;
+ responseBuilder.addStatus(builder.build());
+ }
+ } catch (IOException e) {
+ CoprocessorRpcUtils.setControllerException(controller, e);
+ builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED);
+ } catch (KeyException e) {
+ CoprocessorRpcUtils.setControllerException(controller, new IOException(e));
+ builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED);
+ }
+ }
+ done.run(responseBuilder.build());
+ }
+
+ private byte[] convertToPBEBytes(RpcController controller, PBEAdminRequest request,
+ PBEAdminResponse.Builder builder) {
+ byte[] pbe_prefix = null;
+ try {
+ pbe_prefix = Base64.getDecoder().decode(request.getPbePrefix());
+ } catch (IllegalArgumentException e) {
+ builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED);
+ CoprocessorRpcUtils.setControllerException(controller, new IOException(
+ "Failed to decode specified prefix as Base64 string: " + request.getPbePrefix(), e));
+ }
+ return pbe_prefix;
+ }
+
+ private PBEAdminResponse.Builder getResponseBuilder(RpcController controller,
+ PBEAdminRequest request) {
+ PBEAdminResponse.Builder builder = PBEAdminResponse.newBuilder()
+ .setKeyNamespace(request.getKeyNamespace());
+ byte[] pbe_prefix = null;
+ try {
+ pbe_prefix = Base64.getDecoder().decode(request.getPbePrefix());
+ builder.setPbePrefixBytes(ByteString.copyFrom(pbe_prefix));
+ } catch (IllegalArgumentException e) {
+ builder.setPbeStatus(PBEAdminProtos.PBEKeyStatus.PBE_FAILED);
+ CoprocessorRpcUtils.setControllerException(controller, new IOException(
+ "Failed to decode specified prefix as Base64 string: " + request.getPbePrefix(), e));
+ }
+ return builder;
+ }
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
index 7704f7384d12..52409251500e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
@@ -35,7 +35,6 @@
import org.apache.hadoop.hbase.security.EncryptionUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
-import javax.crypto.spec.SecretKeySpec;
import java.io.IOException;
import java.security.Key;
import java.security.KeyException;
@@ -86,7 +85,7 @@ public void addKey(PBEKeyData keyData) throws IOException {
}
}
- public List getActiveKeys(byte[] pbePrefix, String keyNamespace)
+ protected List getAllKeys(byte[] pbePrefix, String keyNamespace)
throws IOException, KeyException {
Connection connection = server.getConnection();
byte[] prefixForScan = Bytes.add(Bytes.toBytes(pbePrefix.length), pbePrefix,
@@ -98,16 +97,26 @@ public List getActiveKeys(byte[] pbePrefix, String keyNamespace)
scan.addFamily(KEY_META_INFO_FAMILY);
ResultScanner scanner = table.getScanner(scan);
- List activeKeys = new ArrayList<>();
+ List allKeys = new ArrayList<>();
for (Result result : scanner) {
PBEKeyData keyData = parseFromResult(pbePrefix, keyNamespace, result);
- if (keyData.getKeyStatus() == PBEKeyStatus.ACTIVE) {
- activeKeys.add(keyData);
+ if (keyData != null) {
+ allKeys.add(keyData);
}
}
+ return allKeys;
+ }
+ }
- return activeKeys;
+ public List getActiveKeys(byte[] pbePrefix, String keyNamespace)
+ throws IOException, KeyException {
+ List activeKeys = new ArrayList<>();
+ for (PBEKeyData keyData : getAllKeys(pbePrefix, keyNamespace)) {
+ if (keyData.getKeyStatus() == PBEKeyStatus.ACTIVE) {
+ activeKeys.add(keyData);
+ }
}
+ return activeKeys;
}
public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata)
@@ -144,7 +153,7 @@ private Put addMutationColumns(Put put, PBEKeyData keyData) throws IOException {
}
private byte[] constructRowKeyForMetadata(PBEKeyData keyData) {
- return constructRowKeyForMetadata(keyData.getPbe_prefix(), keyData.getKeyNamespace(),
+ return constructRowKeyForMetadata(keyData.getPBEPrefix(), keyData.getKeyNamespace(),
keyData.getKeyMetadataHash());
}
@@ -166,17 +175,30 @@ private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result
DEK_METADATA_QUAL_BYTES));
long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY,
REFRESHED_TIMESTAMP_QUAL_BYTES));
- byte[] dekChecksum = result.getValue(KEY_META_INFO_FAMILY, DEK_CHECKSUM_QUAL_BYTES);
byte[] dekWrappedByStk = result.getValue(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES);
- long stkChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES));
-
- PBEKeyData clusterKey = server.getPBEClusterKeyCache().getClusterKeyByChecksum(stkChecksum);
- Key dek = EncryptionUtil.unwrapKey(server.getConfiguration(), null, dekWrappedByStk,
+ Key dek = null;
+ if (dekWrappedByStk != null) {
+ long stkChecksum =
+ Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY, STK_CHECKSUM_QUAL_BYTES));
+ PBEKeyData clusterKey = server.getPBEClusterKeyCache().getClusterKeyByChecksum(stkChecksum);
+ if (clusterKey == null) {
+ LOG.error("Dropping key with metadata: {} as STK with checksum: {} is unavailable",
+ dekMetadata, stkChecksum);
+ return null;
+ }
+ dek = EncryptionUtil.unwrapKey(server.getConfiguration(), null, dekWrappedByStk,
clusterKey.getTheKey());
+ }
PBEKeyData dekKeyData =
new PBEKeyData(pbePrefix, keyNamespace, dek, keyStatus, dekMetadata, refreshedTimestamp);
- if (!Bytes.equals(dekKeyData.getKeyMetadataHash(), dekChecksum)) {
- throw new RuntimeException("Key has didn't match for key with metadata" + dekMetadata);
+ if (dek != null) {
+ long dekChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY,
+ DEK_CHECKSUM_QUAL_BYTES));
+ if (dekKeyData.getKeyChecksum() != dekChecksum) {
+ LOG.error("Dropping key, current key checksum: {} didn't match the expected checksum: {}"
+ + " for key with metadata: {}", dekKeyData.getKeyChecksum(), dekChecksum, dekMetadata);
+ return null;
+ }
}
return dekKeyData;
}
diff --git a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb
index e00f7ca138ce..656999be8071 100644
--- a/hbase-shell/src/main/ruby/hbase/pbe_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/pbe_admin.rb
@@ -32,11 +32,21 @@ def close
end
def pbe_enable(pbe_prefix)
+ prefix, namespace = extract_prefix_info(pbe_prefix)
+ @admin.enablePBE(prefix, namespace)
+ end
+
+ def show_pbe_status(pbe_prefix)
+ prefix, namespace = extract_prefix_info(pbe_prefix)
+ @admin.getPBEKeyStatuses(prefix, namespace)
+ end
+
+ def extract_prefix_info(pbe_prefix)
prefixInfo = pbe_prefix.split(':')
raise(ArgumentError, 'Invalid prefix:namespace format') unless (prefixInfo.length == 1 ||
prefixInfo.length == 2)
- @admin.enablePBE(prefixInfo[0], prefixInfo.length > 1 ? prefixInfo[1] :
- PBEKeyData::KEY_NAMESPACE_GLOBAL)
+ return prefixInfo[0], prefixInfo.length > 1 ? prefixInfo[1] :
+ PBEKeyData::KEY_NAMESPACE_GLOBAL
end
end
end
diff --git a/hbase-shell/src/main/ruby/shell.rb b/hbase-shell/src/main/ruby/shell.rb
index 30e272291821..c8afe02240ef 100644
--- a/hbase-shell/src/main/ruby/shell.rb
+++ b/hbase-shell/src/main/ruby/shell.rb
@@ -626,6 +626,7 @@ def self.exception_handler(hide_traceback)
UnknownProtocolException: No registered Master Coprocessor Endpoint found for PBEAdminService",
commands: %w[
pbe_enable
+ pbe_get_statuses
]
)
diff --git a/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb b/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb
new file mode 100644
index 000000000000..70dd6fcd205c
--- /dev/null
+++ b/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb
@@ -0,0 +1,45 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+module Shell
+ module Commands
+ class PbeGetStatuses < Command
+ def help
+ <<-EOF
+Get key statuses for a given prefix:namespace (prefix in Base64 format).
+If no namespace is specified, the global namespace (*) is used.
+EOF
+ end
+
+ def command(pbe_prefix)
+ formatter.header(['ENCODED-KEY', 'NAMESPACE', 'STATUS', 'METADATA', 'METADATA-HASH', 'REFRESH-TIMESTAMP'])
+ statuses = pbe_admin.show_pbe_status(pbe_prefix)
+ statuses.each { |status|
+ formatter.row([
+ status.getPBEPrefixEncoded(),
+ status.getKeyNamespace(),
+ status.getKeyStatus().toString(),
+ status.getKeyMetadata(),
+ status.getKeyMetadataHashEncoded(),
+ status.getRefreshTimestamp()
+ ])
+ }
+ formatter.footer(statuses.size())
+ end
+ end
+ end
+end
From 56401c7199941b92f2d654e66395a115183b48f2 Mon Sep 17 00:00:00 2001
From: Hari Dara
Date: Wed, 12 Mar 2025 10:48:45 +0530
Subject: [PATCH 07/70] Operation counts
---
.../hbase/keymeta/PBEKeymetaAdminClient.java | 2 +-
.../hadoop/hbase/io/crypto/PBEKeyData.java | 50 ++++++++---
.../src/main/protobuf/server/PBEAdmin.proto | 2 +
.../hbase/keymeta/PBEKeymetaAdminImpl.java | 22 ++---
.../keymeta/PBEKeymetaServiceEndpoint.java | 2 +
.../keymeta/PBEKeymetaTableAccessor.java | 83 +++++++++++++++++--
.../ruby/shell/commands/pbe_get_statuses.rb | 7 +-
7 files changed, 135 insertions(+), 33 deletions(-)
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java
index b39e7768e0b3..bc39f3d37b29 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminClient.java
@@ -49,7 +49,7 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx
status.getKeyNamespace(), null,
PBEKeyStatus.forValue((byte) status.getPbeStatus().getNumber()),
status.getKeyMetadata(),
- status.getRefreshTimestamp()));
+ status.getRefreshTimestamp(), status.getReadOpCount(), status.getWriteOpCount()));
}
} catch (ServiceException e) {
throw ProtobufUtil.handleRemoteException(e);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java
index 34524dd6703f..b49b74659233 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyData.java
@@ -53,12 +53,14 @@
public class PBEKeyData {
public static final String KEY_NAMESPACE_GLOBAL = "*";
- private byte[] pbePrefix;
- private String keyNamespace;
- private Key theKey;
- private PBEKeyStatus keyStatus;
- private String keyMetadata;
- private long refreshTimestamp;
+ private final byte[] pbePrefix;
+ private final String keyNamespace;
+ private final Key theKey;
+ private final PBEKeyStatus keyStatus;
+ private final String keyMetadata;
+ private final long refreshTimestamp;
+ private final long readOpCount;
+ private final long writeOpCount;
private volatile long keyChecksum = 0;
private byte[] keyMetadataHash;
@@ -74,25 +76,31 @@ public class PBEKeyData {
public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeyStatus keyStatus,
String keyMetadata) {
this(pbe_prefix, key_namespace, theKey, keyStatus, keyMetadata,
- EnvironmentEdgeManager.currentTime());
+ EnvironmentEdgeManager.currentTime(), 0, 0);
}
/**
* Constructs a new instance with the given parameters.
*
- * @param pbe_prefix The PBE prefix associated with the key.
- * @param theKey The actual key, can be {@code null}.
- * @param keyStatus The status of the key.
- * @param keyMetadata The metadata associated with the key.
+ * @param pbe_prefix The PBE prefix associated with the key.
+ * @param theKey The actual key, can be {@code null}.
+ * @param keyStatus The status of the key.
+ * @param keyMetadata The metadata associated with the key.
* @param refreshTimestamp The timestamp when this key was last refreshed.
+ * @param readOpCount The current number of read operations for this key.
+ * @param writeOpCount The current number of write operations for this key.
* @throws NullPointerException if any of pbe_prefix, keyStatus or keyMetadata is null.
*/
public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeyStatus keyStatus,
- String keyMetadata, long refreshTimestamp) {
+ String keyMetadata, long refreshTimestamp, long readOpCount, long writeOpCount) {
Preconditions.checkNotNull(pbe_prefix, "pbe_prefix should not be null");
Preconditions.checkNotNull(key_namespace, "key_namespace should not be null");
Preconditions.checkNotNull(keyStatus, "keyStatus should not be null");
Preconditions.checkNotNull(keyMetadata, "keyMetadata should not be null");
+ Preconditions.checkArgument(readOpCount >= 0, "readOpCount: " + readOpCount +
+ " should be >= 0");
+ Preconditions.checkArgument(writeOpCount >= 0, "writeOpCount: " + writeOpCount +
+ " should be >= 0");
this.pbePrefix = pbe_prefix;
this.keyNamespace = key_namespace;
@@ -100,6 +108,8 @@ public PBEKeyData(byte[] pbe_prefix, String key_namespace, Key theKey, PBEKeySta
this.keyStatus = keyStatus;
this.keyMetadata = keyMetadata;
this.refreshTimestamp = refreshTimestamp;
+ this.readOpCount = readOpCount;
+ this.writeOpCount = writeOpCount;
}
/**
@@ -166,6 +176,22 @@ public long getRefreshTimestamp() {
return refreshTimestamp;
}
+ /**
+ * @return the number of times this key has been used for read operations as of the time this
+ * key data was initialized.
+ */
+ public long getReadOpCount() {
+ return readOpCount;
+ }
+
+ /**
+ * @return the number of times this key has been used for write operations as of the time this
+ * key data was initialized.
+ */
+ public long getWriteOpCount() {
+ return writeOpCount;
+ }
+
/**
* Computes the checksum of the key. If the checksum has already been computed, this method
* returns the previously computed value. The checksum is computed using the CRC32C algorithm.
diff --git a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto
index 4f0c4422b4c8..78c68d2a745d 100644
--- a/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/server/PBEAdmin.proto
@@ -42,6 +42,8 @@ message PBEAdminResponse {
required PBEKeyStatus pbe_status = 3;
optional string key_metadata = 4;
optional int64 refresh_timestamp = 5;
+ optional int64 read_op_count = 6;
+ optional int64 write_op_count = 7;
}
message PBEGetStatusResponse {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java
index c5cf0615140b..bc09f4685a83 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java
@@ -52,17 +52,6 @@ public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOEx
return pbeKey.getKeyStatus();
}
- private static byte[] convertToPrefixBytes(String pbePrefix) throws IOException {
- byte[] pbe_prefix;
- try {
- pbe_prefix = Base64.getDecoder().decode(pbePrefix);
- }
- catch (IllegalArgumentException e) {
- throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e);
- }
- return pbe_prefix;
- }
-
@Override
public List getPBEKeyStatuses(String pbePrefix, String keyNamespace)
throws IOException, KeyException {
@@ -74,4 +63,15 @@ public List getPBEKeyStatuses(String pbePrefix, String keyNamespace)
byte[] pbe_prefix = convertToPrefixBytes(pbePrefix);
return super.getAllKeys(pbe_prefix, keyNamespace);
}
+
+ private static byte[] convertToPrefixBytes(String pbePrefix) throws IOException {
+ byte[] pbe_prefix;
+ try {
+ pbe_prefix = Base64.getDecoder().decode(pbePrefix);
+ }
+ catch (IllegalArgumentException e) {
+ throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e);
+ }
+ return pbe_prefix;
+ }
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java
index e32ecd2c7cc5..40b614dc1a7f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaServiceEndpoint.java
@@ -138,6 +138,8 @@ public void getPBEStatuses(RpcController controller, PBEAdminRequest request,
keyData.getKeyStatus().getVal()))
.setKeyMetadata(keyData.getKeyMetadata())
.setRefreshTimestamp(keyData.getRefreshTimestamp())
+ .setReadOpCount(keyData.getReadOpCount())
+ .setWriteOpCount(keyData.getWriteOpCount())
;
responseBuilder.addStatus(builder.build());
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
index 52409251500e..ec2f25257e9c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
@@ -24,6 +24,7 @@
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
@@ -71,20 +72,39 @@ public class PBEKeymetaTableAccessor extends PBEKeyManager {
public static final String KEY_STATUS_QUAL_NAME = "key_status";
public static final byte[] KEY_STATUS_QUAL_BYTES = Bytes.toBytes(KEY_STATUS_QUAL_NAME);
+ public static final String READ_OP_COUNT_QUAL_NAME = "read_op_count";
+ public static final byte[] READ_OP_COUNT_QUAL_BYTES = Bytes.toBytes(READ_OP_COUNT_QUAL_NAME);
+
+ public static final String WRITE_OP_COUNT_QUAL_NAME = "write_op_count";
+ public static final byte[] WRITE_OP_COUNT_QUAL_BYTES = Bytes.toBytes(WRITE_OP_COUNT_QUAL_NAME);
+
public PBEKeymetaTableAccessor(Server server) {
super(server);
}
+ /**
+ * Add the specified key to the keymeta table.
+ * @param keyData The key data.
+ * @throws IOException when there is an underlying IOException.
+ */
public void addKey(PBEKeyData keyData) throws IOException {
final Put putForMetadata = addMutationColumns(new Put(constructRowKeyForMetadata(keyData)),
keyData);
-
Connection connection = server.getConnection();
try (Table table = connection.getTable(KEY_META_TABLE_NAME)) {
table.put(putForMetadata);
}
}
+ /**
+ * Get all the keys for the specified pbe_prefix and key_namespace.
+ *
+ * @param pbePrefix The prefix
+ * @param keyNamespace The namespace
+ * @return a list of key data, one for each key, can be empty when none were found.
+ * @throws IOException when there is an underlying IOException.
+ * @throws KeyException when there is an underlying KeyException.
+ */
protected List getAllKeys(byte[] pbePrefix, String keyNamespace)
throws IOException, KeyException {
Connection connection = server.getConnection();
@@ -108,6 +128,15 @@ protected List getAllKeys(byte[] pbePrefix, String keyNamespace)
}
}
+ /**
+ * Get all the active keys for the specified pbe_prefix and key_namespace.
+ *
+ * @param pbePrefix The prefix
+ * @param keyNamespace The namespace
+ * @return a list of key data, one for each active key, can be empty when none were found.
+ * @throws IOException when there is an underlying IOException.
+ * @throws KeyException when there is an underlying KeyException.
+ */
public List getActiveKeys(byte[] pbePrefix, String keyNamespace)
throws IOException, KeyException {
List activeKeys = new ArrayList<>();
@@ -119,6 +148,16 @@ public List getActiveKeys(byte[] pbePrefix, String keyNamespace)
return activeKeys;
}
+ /**
+ * Get the specific key identified by pbePrefix, keyNamespace and keyMetadata.
+ *
+ * @param pbePrefix The prefix.
+ * @param keyNamespace The namespace.
+ * @param keyMetadata The metadata.
+ * @return the key or {@code null}
+ * @throws IOException when there is an underlying IOException.
+ * @throws KeyException when there is an underlying KeyException.
+ */
public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata)
throws IOException, KeyException {
Connection connection = server.getConnection();
@@ -130,6 +169,32 @@ public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetada
}
}
+ /**
+ * Report read or write operation count on the specific key identified by pbePrefix, keyNamespace
+ * and keyMetadata. The reported value is added to the existing operation count using the
+ * Increment mutation.
+ * @param pbePrefix The prefix.
+ * @param keyNamespace The namespace.
+ * @param keyMetadata The metadata.
+ * @throws IOException when there is an underlying IOException.
+ */
+ public void reportOperation(byte[] pbePrefix, String keyNamespace, String keyMetadata, long count,
+ boolean isReadOperation) throws IOException {
+ Connection connection = server.getConnection();
+ try (Table table = connection.getTable(KEY_META_TABLE_NAME)) {
+ byte[] rowKey = constructRowKeyForMetadata(pbePrefix, keyNamespace,
+ PBEKeyData.constructMetadataHash(keyMetadata));
+ Increment incr = new Increment(rowKey)
+ .addColumn(KEY_META_INFO_FAMILY,
+ isReadOperation ? READ_OP_COUNT_QUAL_BYTES : WRITE_OP_COUNT_QUAL_BYTES,
+ count);
+ table.increment(incr);
+ }
+ }
+
+ /**
+ * Add the mutation columns to the given Put that are derived from the keyData.
+ */
private Put addMutationColumns(Put put, PBEKeyData keyData) throws IOException {
PBEKeyData latestClusterKey = server.getPBEClusterKeyCache().getLatestClusterKey();
if (keyData.getTheKey() != null) {
@@ -169,12 +234,8 @@ private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result
if (result == null || result.isEmpty()) {
return null;
}
- PBEKeyStatus keyStatus = PBEKeyStatus.forValue(
- result.getValue(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES)[0]);
String dekMetadata = Bytes.toString(result.getValue(KEY_META_INFO_FAMILY,
DEK_METADATA_QUAL_BYTES));
- long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY,
- REFRESHED_TIMESTAMP_QUAL_BYTES));
byte[] dekWrappedByStk = result.getValue(KEY_META_INFO_FAMILY, DEK_WRAPPED_BY_STK_QUAL_BYTES);
Key dek = null;
if (dekWrappedByStk != null) {
@@ -189,8 +250,16 @@ private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result
dek = EncryptionUtil.unwrapKey(server.getConfiguration(), null, dekWrappedByStk,
clusterKey.getTheKey());
}
- PBEKeyData dekKeyData =
- new PBEKeyData(pbePrefix, keyNamespace, dek, keyStatus, dekMetadata, refreshedTimestamp);
+ PBEKeyStatus keyStatus = PBEKeyStatus.forValue(
+ result.getValue(KEY_META_INFO_FAMILY, KEY_STATUS_QUAL_BYTES)[0]);
+ long refreshedTimestamp = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY,
+ REFRESHED_TIMESTAMP_QUAL_BYTES));
+ byte[] readOpValue = result.getValue(KEY_META_INFO_FAMILY, READ_OP_COUNT_QUAL_BYTES);
+ long readOpCount = readOpValue != null ? Bytes.toLong(readOpValue) : 0;
+ byte[] writeOpValue = result.getValue(KEY_META_INFO_FAMILY, WRITE_OP_COUNT_QUAL_BYTES);
+ long writeOpCount = writeOpValue != null ? Bytes.toLong(writeOpValue) : 0;
+ PBEKeyData dekKeyData = new PBEKeyData(pbePrefix, keyNamespace, dek, keyStatus, dekMetadata,
+ refreshedTimestamp, readOpCount, writeOpCount);
if (dek != null) {
long dekChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY,
DEK_CHECKSUM_QUAL_BYTES));
diff --git a/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb b/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb
index 70dd6fcd205c..8a34b6579ff1 100644
--- a/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/pbe_get_statuses.rb
@@ -26,7 +26,8 @@ def help
end
def command(pbe_prefix)
- formatter.header(['ENCODED-KEY', 'NAMESPACE', 'STATUS', 'METADATA', 'METADATA-HASH', 'REFRESH-TIMESTAMP'])
+ formatter.header(['ENCODED-KEY', 'NAMESPACE', 'STATUS', 'METADATA', 'METADATA-HASH',
+ 'REFRESH-TIMESTAMP', 'READ-OP-COUNT', 'WRITE-OP-COUNT'])
statuses = pbe_admin.show_pbe_status(pbe_prefix)
statuses.each { |status|
formatter.row([
@@ -35,7 +36,9 @@ def command(pbe_prefix)
status.getKeyStatus().toString(),
status.getKeyMetadata(),
status.getKeyMetadataHashEncoded(),
- status.getRefreshTimestamp()
+ status.getRefreshTimestamp(),
+ status.getReadOpCount(),
+ status.getWriteOpCount()
])
}
formatter.footer(statuses.size())
From 236c9cb0543e09b4d4e4931ad52ef59e7e9a4566 Mon Sep 17 00:00:00 2001
From: Hari Dara
Date: Sat, 29 Mar 2025 19:03:06 +0530
Subject: [PATCH 08/70] Attempting broader test coverage
---
.../org/apache/hadoop/hbase/HConstants.java | 5 +
.../io/crypto/PBEKeyStoreKeyProvider.java | 29 +++---
.../hbase/io/crypto/MockPBEKeyProvider.java | 94 +++++++++++++++++++
.../apache/hadoop/hbase/HBaseServerBase.java | 2 +-
.../java/org/apache/hadoop/hbase/Server.java | 10 ++
.../hbase/keymeta/PBEClusterKeyAccessor.java | 3 +-
.../hadoop/hbase/keymeta/PBEKeyAccessor.java | 27 ++++--
...eyManager.java => PBEKeyAccessorBase.java} | 36 +++++--
.../hbase/keymeta/PBEKeymetaAdminImpl.java | 54 ++++++-----
.../keymeta/PBEKeymetaTableAccessor.java | 49 +++++-----
.../hbase/master/TestPBEClusterKey.java | 93 ++++++++++++++++++
.../hadoop/hbase/HBaseTestingUtility.java | 7 +-
12 files changed, 336 insertions(+), 73 deletions(-)
create mode 100644 hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java
rename hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/{PBEKeyManager.java => PBEKeyAccessorBase.java} (65%)
create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 670bda523919..1649690be8ec 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -1293,6 +1293,11 @@ public enum OperationStatusCode {
"hbase.crypto.pbe.master.key.name";
public static final String CRYPTO_PBE_ENABLED_CONF_KEY = "hbase.crypto.pbe.enabled";
+ public static final boolean CRYPTO_PBE_DEFAULT_ENABLED = false;
+
+ public static final String CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_COUNT =
+ "hbase.crypto.pbe.per_prefix.active_count";
+ public static final int CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_DEFAULT_COUNT = 1;
public static final String CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX = "hbase.crypto.pbe.prefix.";
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java
index 4c958a30b01d..098afaa0d954 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/PBEKeyStoreKeyProvider.java
@@ -34,8 +34,7 @@ public PBEKeyData getClusterKey(byte[] clusterId) {
throw new RuntimeException("Unable to find cluster key with alias: " + masterKeyAlias);
}
// Encode clusterId too for consistency with that of PBE prefixes.
- String keyMetadata = generateKeyMetadata(masterKeyAlias,
- Base64.getEncoder().encodeToString(clusterId));
+ String keyMetadata = generateKeyMetadata(masterKeyAlias, encodeToPrefixStr(clusterId));
return new PBEKeyData(clusterId, PBEKeyData.KEY_NAMESPACE_GLOBAL, key, PBEKeyStatus.ACTIVE,
keyMetadata);
}
@@ -43,7 +42,7 @@ public PBEKeyData getClusterKey(byte[] clusterId) {
@Override
public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace) throws IOException {
checkConfig();
- String encodedPrefix = Base64.getEncoder().encodeToString(pbe_prefix);
+ String encodedPrefix = encodeToPrefixStr(pbe_prefix);
String aliasConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix + "." +
"alias";
String keyMetadata = generateKeyMetadata(conf.get(aliasConfKey, null), encodedPrefix);
@@ -58,14 +57,7 @@ public PBEKeyData unwrapKey(String keyMetadataStr) throws IOException {
String activeStatusConfKey = HConstants.CRYPTO_PBE_PREFIX_CONF_KEY_PREFIX + encodedPrefix +
".active";
boolean isActive = conf.getBoolean(activeStatusConfKey, true);
- byte[] pbe_prefix;
- try {
- pbe_prefix = Base64.getDecoder().decode(encodedPrefix);
- }
- catch (IllegalArgumentException e) {
- throw new IOException("Failed to decode specified prefix as Base64 string: " +
- encodedPrefix, e);
- }
+ byte[] pbe_prefix = decodeToPrefixBytes(encodedPrefix);
String alias = keyMetadata.get(KEY_METADATA_ALIAS);
Key key = alias != null ? getKey(alias) : null;
if (key != null) {
@@ -88,4 +80,19 @@ private void checkConfig() {
throw new IllegalStateException("initConfig is not called or config is null");
}
}
+
+ public static byte[] decodeToPrefixBytes(String pbePrefix) throws IOException {
+ byte[] pbe_prefix;
+ try {
+ pbe_prefix = Base64.getDecoder().decode(pbePrefix);
+ }
+ catch (IllegalArgumentException e) {
+ throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e);
+ }
+ return pbe_prefix;
+ }
+
+ public static String encodeToPrefixStr(byte[] pbe_prefix) {
+ return Base64.getEncoder().encodeToString(pbe_prefix);
+ }
}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java
new file mode 100644
index 000000000000..6a38cdb77403
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/MockPBEKeyProvider.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io.crypto;
+
+import java.io.IOException;
+import java.security.Key;
+import java.security.KeyStore;
+import java.security.NoSuchAlgorithmException;
+import java.util.HashMap;
+import java.util.Map;
+import javax.crypto.KeyGenerator;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * A simple implementation of PBEKeyProvider for testing. It generates a key on demand given a
+ * prefix. One can control the state of a key by calling setKeyStatus and can rotate a key by
+ * calling setKey.
+ */
+public class MockPBEKeyProvider extends MockAesKeyProvider implements PBEKeyProvider {
+ public Map keys = new HashMap<>();
+ public Map keyStatus = new HashMap<>();
+
+ @Override public void initConfig(Configuration conf) {
+ // NO-OP
+ }
+
+ @Override public PBEKeyData getClusterKey(byte[] clusterId) throws IOException {
+ return getKey(clusterId);
+ }
+
+ @Override public PBEKeyData getPBEKey(byte[] pbe_prefix, String key_namespace)
+ throws IOException {
+ return getKey(pbe_prefix);
+ }
+
+ @Override public PBEKeyData unwrapKey(String keyAlias) throws IOException {
+ return getKey(keyAlias.getBytes());
+ }
+
+ /**
+ * Lookup the key data for the given prefix from keys. If missing, initialize one using generateSecretKey().
+ */
+ public PBEKeyData getKey(byte[] prefix_bytes) {
+ String alias = Bytes.toString(prefix_bytes);
+ Key key = keys.get(alias);
+ if (key == null) {
+ key = generateSecretKey();
+ keys.put(alias, key);
+ }
+ PBEKeyStatus keyStatus = this.keyStatus.get(alias);
+ return new PBEKeyData(prefix_bytes, PBEKeyData.KEY_NAMESPACE_GLOBAL, key,
+ keyStatus == null ? PBEKeyStatus.ACTIVE : keyStatus, Bytes.toString(prefix_bytes));
+ }
+
+ public void setKeyStatus(byte[] prefix_bytes, PBEKeyStatus status) {
+ keyStatus.put(Bytes.toString(prefix_bytes), status);
+ }
+
+ public void setKey(byte[] prefix_bytes, Key key) {
+ keys.put(Bytes.toString(prefix_bytes), key);
+ }
+
+ /**
+ * Generate a new secret key.
+ * @return the key
+ */
+ public static Key generateSecretKey() {
+ KeyGenerator keyGen = null;
+ try {
+ keyGen = KeyGenerator.getInstance("AES");
+ } catch (NoSuchAlgorithmException e) {
+ throw new RuntimeException(e);
+ }
+ keyGen.init(256);
+ return keyGen.generateKey();
+ }
+}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java
index ddf0fe578cff..6de1e060714a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseServerBase.java
@@ -430,7 +430,7 @@ public PBEClusterKeyCache getPBEClusterKeyCache() {
}
protected void buildPBEClusterKeyCache() throws IOException {
- if (pbeClusterKeyCache == null) {
+ if (pbeClusterKeyCache == null && Server.isPBEEnabled(this)) {
pbeClusterKeyCache = PBEClusterKeyCache.createCache(new PBEClusterKeyAccessor(this));
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java
index dcca89e8b2fb..f7be347cdfa7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/Server.java
@@ -122,4 +122,14 @@ default FileSystem getFileSystem() {
default boolean isStopping() {
return false;
}
+
+ /**
+   * From the given server, determine if PBE is enabled.
+ * @return true if PBE is enabled
+ */
+ static boolean isPBEEnabled(Server server) {
+ return server.getConfiguration()
+ .getBoolean(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, HConstants.CRYPTO_PBE_DEFAULT_ENABLED);
+ }
+
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java
index ef62e92d93c6..83dd5a250631 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEClusterKeyAccessor.java
@@ -35,7 +35,7 @@
import static org.apache.hadoop.hbase.HConstants.CLUSTER_KEY_FILE_PREFIX;
@InterfaceAudience.Private
-public class PBEClusterKeyAccessor extends PBEKeyManager {
+public class PBEClusterKeyAccessor extends PBEKeyAccessorBase {
protected final Path clusterKeyDir;
public PBEClusterKeyAccessor(Server server) throws IOException {
@@ -77,6 +77,7 @@ public List getAllClusterKeyFiles() throws IOException {
return new ArrayList<>(clusterKeys.values());
}
+
public PBEKeyData loadClusterKey(Path keyPath) throws IOException {
PBEKeyProvider provider = getKeyProvider();
return provider.unwrapKey(loadKeyMetadata(keyPath));
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java
index 0714e0b5b362..29f54a720a88 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessor.java
@@ -18,23 +18,25 @@
package org.apache.hadoop.hbase.keymeta;
import org.apache.hadoop.hbase.io.crypto.PBEKeyData;
+import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider;
+import org.apache.hadoop.hbase.io.crypto.PBEKeyStoreKeyProvider;
import org.apache.yetus.audience.InterfaceAudience;
import java.io.IOException;
import java.security.KeyException;
import java.util.List;
-// TODO: Also integrate with the key provider when it is not found in the cache???
/**
* This class provides a unified access on top of both {@code PBEKeyDataCache} (L1) and
* {@code PBEKeymetaTableAccessor} (L2) to access PBE keys. When the getter is called, it first
* checks if L1 cache has the key, if not, it tries to get the key from L2.
*/
@InterfaceAudience.Private
-public class PBEKeyAccessor {
+public class PBEKeyAccessor extends PBEKeyAccessorBase {
private final PBEKeyDataCache keyDataCache;
private final PBEKeymetaTableAccessor keymetaAccessor;
public PBEKeyAccessor(PBEKeymetaTableAccessor keymetaAccessor) {
+ super(keymetaAccessor.server);
this.keymetaAccessor = keymetaAccessor;
keyDataCache = new PBEKeyDataCache();
}
@@ -42,17 +44,29 @@ public PBEKeyAccessor(PBEKeymetaTableAccessor keymetaAccessor) {
/**
* Get key data by key metadata.
*
- * @param pbePrefix The prefix of the key
+ * @param pbe_prefix The prefix of the key
* @param keyNamespace The namespace of the key
* @param keyMetadata The metadata of the key
* @return The key data or {@code null}
* @throws IOException if an error occurs while retrieving the key
*/
- public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata)
- throws IOException, KeyException {
+ public PBEKeyData getKey(byte[] pbe_prefix, String keyNamespace, String keyMetadata)
+ throws IOException, KeyException {
+ checkPBEEnabled();
+ // 1. Check L1 cache.
PBEKeyData keyData = keyDataCache.getEntry(keyMetadata);
if (keyData == null) {
- keyData = keymetaAccessor.getKey(pbePrefix, keyNamespace, keyMetadata);
+ // 2. Check L2 cache.
+ keyData = keymetaAccessor.getKey(pbe_prefix, keyNamespace, keyMetadata);
+ if (keyData == null) {
+ // 3. Check with Key Provider.
+ PBEKeyProvider provider = getKeyProvider();
+ keyData = provider.unwrapKey(keyMetadata);
+ LOG.info("Got key data with status: {} and metadata: {} for prefix: {}",
+ keyData.getKeyStatus(), keyData.getKeyMetadata(),
+ PBEKeyStoreKeyProvider.encodeToPrefixStr(pbe_prefix));
+ keymetaAccessor.addKey(keyData);
+ }
if (keyData != null) {
keyDataCache.addEntry(keyData);
}
@@ -70,6 +84,7 @@ public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetada
*/
public PBEKeyData getAnActiveKey(byte[] pbePrefix, String keyNamespace)
throws IOException, KeyException {
+ checkPBEEnabled();
PBEKeyData keyData = keyDataCache.getRandomEntryForPrefix(pbePrefix, keyNamespace);
if (keyData == null) {
List activeKeys = keymetaAccessor.getActiveKeys(pbePrefix, keyNamespace);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessorBase.java
similarity index 65%
rename from hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java
rename to hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessorBase.java
index bb426a8661b6..2839669a4f3e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeyAccessorBase.java
@@ -25,19 +25,21 @@
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.IOException;
/**
- * A base class for all keymeta manager implementations.
+ * A base class for all keymeta accessor/manager implementations.
*/
@InterfaceAudience.Private
-public abstract class PBEKeyManager {
- protected static final Logger LOG = LoggerFactory.getLogger(PBEKeyManager.class);
+public abstract class PBEKeyAccessorBase {
+ protected static final Logger LOG = LoggerFactory.getLogger(PBEKeyAccessorBase.class);
protected final Server server;
private Boolean pbeEnabled;
+ private Integer perPrefixActiveKeyCount;
- public PBEKeyManager(Server server) {
+ public PBEKeyAccessorBase(Server server) {
this.server = server;
}
@@ -62,9 +64,31 @@ protected PBEKeyProvider getKeyProvider() {
*/
protected boolean isPBEEnabled() {
if (pbeEnabled == null) {
- pbeEnabled = server.getConfiguration().getBoolean(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY,
- false);
+ pbeEnabled = Server.isPBEEnabled(server);
}
return pbeEnabled;
}
+
+ /**
+ * Check if PBE is enabled, otherwise throw exception.
+ * @throws IOException if PBE is not enabled.
+ */
+ protected void checkPBEEnabled() throws IOException {
+ if (! isPBEEnabled()) {
+ throw new IOException("PBE is currently not enabled in HBase configuration");
+ }
+ }
+
+ protected int getPerPrefixActiveKeyConfCount() throws IOException {
+ if (perPrefixActiveKeyCount == null) {
+ perPrefixActiveKeyCount = server.getConfiguration().getInt(
+ HConstants.CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_COUNT,
+ HConstants.CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_DEFAULT_COUNT);
+ }
+ if (perPrefixActiveKeyCount <= 0) {
+ throw new IOException("Invalid value: " + perPrefixActiveKeyCount + " configured for: " +
+ HConstants.CRYPTO_PBE_PER_PREFIX_ACTIVE_KEY_COUNT);
+ }
+ return perPrefixActiveKeyCount;
+ }
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java
index bc09f4685a83..08b62574944a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaAdminImpl.java
@@ -21,13 +21,15 @@
import org.apache.hadoop.hbase.io.crypto.PBEKeyData;
import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider;
import org.apache.hadoop.hbase.io.crypto.PBEKeyStatus;
+import org.apache.hadoop.hbase.io.crypto.PBEKeyStoreKeyProvider;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.security.KeyException;
-import java.util.Base64;
+import java.util.HashSet;
import java.util.List;
+import java.util.Set;
@InterfaceAudience.Private
public class PBEKeymetaAdminImpl extends PBEKeymetaTableAccessor implements PBEKeymetaAdmin {
@@ -39,39 +41,41 @@ public PBEKeymetaAdminImpl(Server server) {
@Override
public PBEKeyStatus enablePBE(String pbePrefix, String keyNamespace) throws IOException {
- if (! isPBEEnabled()) {
- throw new IOException("PBE is currently not enabled in HBase configuration");
- }
+ checkPBEEnabled();
LOG.info("Trying to enable PBE on key: {} under namespace: {}", pbePrefix, keyNamespace);
- byte[] pbe_prefix = convertToPrefixBytes(pbePrefix);
+ byte[] pbe_prefix = PBEKeyStoreKeyProvider.decodeToPrefixBytes(pbePrefix);
PBEKeyProvider provider = getKeyProvider();
- PBEKeyData pbeKey = provider.getPBEKey(pbe_prefix, keyNamespace);
- LOG.info("Got key data with status: {} and metadata: {} for prefix: {}", pbeKey.getKeyStatus(),
- pbeKey.getKeyMetadata(), pbePrefix);
- addKey(pbeKey);
- return pbeKey.getKeyStatus();
+ int perPrefixActiveKeyConfCount = getPerPrefixActiveKeyConfCount();
+ Set retrievedKeys = new HashSet<>(perPrefixActiveKeyConfCount);
+ PBEKeyData pbeKey = null;
+ for (int i = 0; i < perPrefixActiveKeyConfCount; ++i) {
+ pbeKey = provider.getPBEKey(pbe_prefix, keyNamespace);
+ if (pbeKey == null) {
+ throw new IOException("Invalid null PBE key received from key provider");
+ }
+ if (retrievedKeys.contains(pbeKey)) {
+ // This typically means, the key provider is not capable of producing multiple active keys.
+ LOG.info("enablePBE: configured key count per prefix: " + perPrefixActiveKeyConfCount +
+ " but received only: " + retrievedKeys.size() + " unique keys.");
+ break;
+ }
+ retrievedKeys.add(pbeKey);
+ LOG.info("enablePBE: got key data with status: {} and metadata: {} for prefix: {}",
+ pbeKey.getKeyStatus(), pbeKey.getKeyMetadata(), pbePrefix);
+ addKey(pbeKey);
+ }
+ // pbeKey can't be null at this point as perPrefixActiveKeyConfCount will always be > 0,
+ // but the null check is needed to avoid any warning.
+ return pbeKey == null ? null : pbeKey.getKeyStatus();
}
@Override
public List getPBEKeyStatuses(String pbePrefix, String keyNamespace)
throws IOException, KeyException {
- if (! isPBEEnabled()) {
- throw new IOException("PBE is currently not enabled in HBase configuration");
- }
+ checkPBEEnabled();
LOG.info("Getting key statuses for PBE on key: {} under namespace: {}", pbePrefix,
keyNamespace);
- byte[] pbe_prefix = convertToPrefixBytes(pbePrefix);
+ byte[] pbe_prefix = PBEKeyStoreKeyProvider.decodeToPrefixBytes(pbePrefix);
return super.getAllKeys(pbe_prefix, keyNamespace);
}
-
- private static byte[] convertToPrefixBytes(String pbePrefix) throws IOException {
- byte[] pbe_prefix;
- try {
- pbe_prefix = Base64.getDecoder().decode(pbePrefix);
- }
- catch (IllegalArgumentException e) {
- throw new IOException("Failed to decode specified prefix as Base64 string: " + pbePrefix, e);
- }
- return pbe_prefix;
- }
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
index ec2f25257e9c..4dfc4dbbcc42 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/keymeta/PBEKeymetaTableAccessor.java
@@ -46,7 +46,7 @@
* Accessor for PBE keymeta table.
*/
@InterfaceAudience.Private
-public class PBEKeymetaTableAccessor extends PBEKeyManager {
+public class PBEKeymetaTableAccessor extends PBEKeyAccessorBase {
private static final String KEY_META_INFO_FAMILY_STR = "info";
public static final byte[] KEY_META_INFO_FAMILY = Bytes.toBytes(KEY_META_INFO_FAMILY_STR);
@@ -88,6 +88,7 @@ public PBEKeymetaTableAccessor(Server server) {
* @throws IOException when there is an underlying IOException.
*/
public void addKey(PBEKeyData keyData) throws IOException {
+ checkPBEEnabled();
final Put putForMetadata = addMutationColumns(new Put(constructRowKeyForMetadata(keyData)),
keyData);
Connection connection = server.getConnection();
@@ -99,16 +100,17 @@ public void addKey(PBEKeyData keyData) throws IOException {
/**
* Get all the keys for the specified pbe_prefix and key_namespace.
*
- * @param pbePrefix The prefix
+ * @param pbe_prefix The prefix
* @param keyNamespace The namespace
* @return a list of key data, one for each key, can be empty when none were found.
* @throws IOException when there is an underlying IOException.
* @throws KeyException when there is an underlying KeyException.
*/
- protected List getAllKeys(byte[] pbePrefix, String keyNamespace)
+ protected List getAllKeys(byte[] pbe_prefix, String keyNamespace)
throws IOException, KeyException {
+ checkPBEEnabled();
Connection connection = server.getConnection();
- byte[] prefixForScan = Bytes.add(Bytes.toBytes(pbePrefix.length), pbePrefix,
+ byte[] prefixForScan = Bytes.add(Bytes.toBytes(pbe_prefix.length), pbe_prefix,
Bytes.toBytes(keyNamespace));
try (Table table = connection.getTable(KEY_META_TABLE_NAME)) {
PrefixFilter prefixFilter = new PrefixFilter(prefixForScan);
@@ -119,7 +121,7 @@ protected List getAllKeys(byte[] pbePrefix, String keyNamespace)
ResultScanner scanner = table.getScanner(scan);
List allKeys = new ArrayList<>();
for (Result result : scanner) {
- PBEKeyData keyData = parseFromResult(pbePrefix, keyNamespace, result);
+ PBEKeyData keyData = parseFromResult(pbe_prefix, keyNamespace, result);
if (keyData != null) {
allKeys.add(keyData);
}
@@ -131,16 +133,17 @@ protected List getAllKeys(byte[] pbePrefix, String keyNamespace)
/**
* Get all the active keys for the specified pbe_prefix and key_namespace.
*
- * @param pbePrefix The prefix
+ * @param pbe_prefix The prefix
* @param keyNamespace The namespace
* @return a list of key data, one for each active key, can be empty when none were found.
* @throws IOException when there is an underlying IOException.
* @throws KeyException when there is an underlying KeyException.
*/
- public List getActiveKeys(byte[] pbePrefix, String keyNamespace)
+ public List getActiveKeys(byte[] pbe_prefix, String keyNamespace)
throws IOException, KeyException {
+ checkPBEEnabled();
List activeKeys = new ArrayList<>();
- for (PBEKeyData keyData : getAllKeys(pbePrefix, keyNamespace)) {
+ for (PBEKeyData keyData : getAllKeys(pbe_prefix, keyNamespace)) {
if (keyData.getKeyStatus() == PBEKeyStatus.ACTIVE) {
activeKeys.add(keyData);
}
@@ -149,40 +152,42 @@ public List getActiveKeys(byte[] pbePrefix, String keyNamespace)
}
/**
- * Get the specific key identified by pbePrefix, keyNamespace and keyMetadata.
+ * Get the specific key identified by pbe_prefix, keyNamespace and keyMetadata.
*
- * @param pbePrefix The prefix.
+ * @param pbe_prefix The prefix.
* @param keyNamespace The namespace.
* @param keyMetadata The metadata.
* @return the key or {@code null}
* @throws IOException when there is an underlying IOException.
* @throws KeyException when there is an underlying KeyException.
*/
- public PBEKeyData getKey(byte[] pbePrefix, String keyNamespace, String keyMetadata)
+ public PBEKeyData getKey(byte[] pbe_prefix, String keyNamespace, String keyMetadata)
throws IOException, KeyException {
+ checkPBEEnabled();
Connection connection = server.getConnection();
try (Table table = connection.getTable(KEY_META_TABLE_NAME)) {
- byte[] rowKey = constructRowKeyForMetadata(pbePrefix, keyNamespace,
+ byte[] rowKey = constructRowKeyForMetadata(pbe_prefix, keyNamespace,
PBEKeyData.constructMetadataHash(keyMetadata));
Result result = table.get(new Get(rowKey));
- return parseFromResult(pbePrefix, keyNamespace, result);
+ return parseFromResult(pbe_prefix, keyNamespace, result);
}
}
/**
- * Report read or write operation count on the specific key identified by pbePrefix, keyNamespace
+ * Report read or write operation count on the specific key identified by pbe_prefix, keyNamespace
* and keyMetadata. The reported value is added to the existing operation count using the
* Increment mutation.
- * @param pbePrefix The prefix.
+ * @param pbe_prefix The prefix.
* @param keyNamespace The namespace.
* @param keyMetadata The metadata.
* @throws IOException when there is an underlying IOException.
*/
- public void reportOperation(byte[] pbePrefix, String keyNamespace, String keyMetadata, long count,
+ public void reportOperation(byte[] pbe_prefix, String keyNamespace, String keyMetadata, long count,
boolean isReadOperation) throws IOException {
+ checkPBEEnabled();
Connection connection = server.getConnection();
try (Table table = connection.getTable(KEY_META_TABLE_NAME)) {
- byte[] rowKey = constructRowKeyForMetadata(pbePrefix, keyNamespace,
+ byte[] rowKey = constructRowKeyForMetadata(pbe_prefix, keyNamespace,
PBEKeyData.constructMetadataHash(keyMetadata));
Increment incr = new Increment(rowKey)
.addColumn(KEY_META_INFO_FAMILY,
@@ -222,14 +227,14 @@ private byte[] constructRowKeyForMetadata(PBEKeyData keyData) {
keyData.getKeyMetadataHash());
}
- private static byte[] constructRowKeyForMetadata(byte[] pbePrefix, String keyNamespace,
+ private static byte[] constructRowKeyForMetadata(byte[] pbe_prefix, String keyNamespace,
byte[] keyMetadataHash) {
- int prefixLength = pbePrefix.length;
- return Bytes.add(Bytes.toBytes(prefixLength), pbePrefix, Bytes.toBytesBinary(keyNamespace),
+ int prefixLength = pbe_prefix.length;
+ return Bytes.add(Bytes.toBytes(prefixLength), pbe_prefix, Bytes.toBytesBinary(keyNamespace),
keyMetadataHash);
}
- private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result result)
+ private PBEKeyData parseFromResult(byte[] pbe_prefix, String keyNamespace, Result result)
throws IOException, KeyException {
if (result == null || result.isEmpty()) {
return null;
@@ -258,7 +263,7 @@ private PBEKeyData parseFromResult(byte[] pbePrefix, String keyNamespace, Result
long readOpCount = readOpValue != null ? Bytes.toLong(readOpValue) : 0;
byte[] writeOpValue = result.getValue(KEY_META_INFO_FAMILY, WRITE_OP_COUNT_QUAL_BYTES);
long writeOpCount = writeOpValue != null ? Bytes.toLong(writeOpValue) : 0;
- PBEKeyData dekKeyData = new PBEKeyData(pbePrefix, keyNamespace, dek, keyStatus, dekMetadata,
+ PBEKeyData dekKeyData = new PBEKeyData(pbe_prefix, keyNamespace, dek, keyStatus, dekMetadata,
refreshedTimestamp, readOpCount, writeOpCount);
if (dek != null) {
long dekChecksum = Bytes.toLong(result.getValue(KEY_META_INFO_FAMILY,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java
new file mode 100644
index 000000000000..13493da48d3f
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestPBEClusterKey.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtil;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.io.crypto.Encryption;
+import org.apache.hadoop.hbase.io.crypto.KeyProvider;
+import org.apache.hadoop.hbase.io.crypto.PBEKeyProvider;
+import org.apache.hadoop.hbase.io.crypto.MockPBEKeyProvider;
+import org.apache.hadoop.hbase.keymeta.PBEClusterKeyCache;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import java.io.IOException;
+import java.security.Key;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+@Category({ MasterTests.class, MediumTests.class })
+public class TestPBEClusterKey {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestPBEClusterKey.class);
+
+ public static final String CLUSTER_KEY_ALIAS = "cluster-key";
+ public static final byte[] CLUSTER_ID = CLUSTER_KEY_ALIAS.getBytes();
+
+
+ private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
+ private static Configuration conf = TEST_UTIL.getConfiguration();
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockPBEKeyProvider.class.getName());
+ conf.set(HConstants.CRYPTO_PBE_ENABLED_CONF_KEY, "true");
+
+ // Start the minicluster
+ TEST_UTIL.startMiniCluster(1);
+ }
+
+ @Test
+ public void testClusterKeyInitializationAndRotation() throws Exception {
+ HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
+ KeyProvider keyProvider = Encryption.getKeyProvider(master.getConfiguration());
+ assertNotNull(keyProvider);
+ assertTrue(keyProvider instanceof PBEKeyProvider);
+ assertTrue(keyProvider instanceof MockPBEKeyProvider);
+ MockPBEKeyProvider pbeKeyProvider = (MockPBEKeyProvider) keyProvider;
+ PBEClusterKeyCache pbeClusterKeyCache = master.getPBEClusterKeyCache();
+ assertNotNull(pbeClusterKeyCache);
+ assertEquals(pbeKeyProvider.getClusterKey(master.getClusterId().getBytes()),
+ pbeClusterKeyCache.getLatestClusterKey());
+
+ // Test rotation of the cluster key by changing the key that the key provider provides and restarting the master.
+ Key newClusterKey = MockPBEKeyProvider.generateSecretKey();
+ pbeKeyProvider.setKey(master.getClusterId().getBytes(), newClusterKey);
+ TEST_UTIL.shutdownMiniCluster();
+ Thread.sleep(2000);
+ TEST_UTIL.restartHBaseCluster(1);
+ master = TEST_UTIL.getHBaseCluster().getMaster();
+ assertEquals(newClusterKey, master.getPBEClusterKeyCache().getLatestClusterKey().getTheKey());
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ TEST_UTIL.shutdownMiniCluster();
+ }
+
+}
diff --git a/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index dc4bc1816acc..a15b8db5e701 100644
--- a/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-testing-util/src/main/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -89,6 +89,7 @@
import org.apache.hadoop.hbase.io.hfile.ChecksumUtil;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.ipc.RpcServerInterface;
+import org.apache.hadoop.hbase.keymeta.PBEKeymetaAdminClient;
import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.mapreduce.MapreduceTestingShim;
import org.apache.hadoop.hbase.master.HMaster;
@@ -201,6 +202,8 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
/** This is for unit tests parameterized with a single boolean. */
public static final List