From a943652be386c85ef445f0f68f7a9cb608767853 Mon Sep 17 00:00:00 2001 From: Noritaka Sekiyama Date: Thu, 13 Aug 2020 22:04:44 +0900 Subject: [PATCH 01/12] HIVE-12679: Cherry-pick 1558b1ad12c6f32d1eab21b6e0865fec5752a2c3 --- .../org/apache/hadoop/hive/conf/HiveConf.java | 3 + .../apache/hadoop/hive/ql/metadata/Hive.java | 1 + .../metadata/HiveMetaStoreClientFactory.java | 55 +++++++++++++++++++ .../SessionHiveMetaStoreClientFactory.java | 52 ++++++++++++++++++ .../hadoop/hive/ql/metadata/TestHive.java | 45 +++++++++++++++ 5 files changed, 156 insertions(+) create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClientFactory.java diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index aad949d36880..985a776aeb17 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -912,6 +912,9 @@ public static enum ConfVars { HADOOP_NUM_REDUCERS("mapreduce.job.reduces", -1, "", true), // Metastore stuff. Be sure to update HiveConf.metaVars when you add something here! + METASTORE_CLIENT_FACTORY_CLASS("hive.metastore.client.factory.class", + "org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClientFactory", + "The name of the factory class that produces objects implementing the IMetaStoreClient interface."), METASTORE_DB_TYPE("hive.metastore.db.type", "DERBY", new StringSet("DERBY", "ORACLE", "MYSQL", "MSSQL", "POSTGRES"), "Type of database used by the metastore. Information schema & JDBCStorageHandler depend on it."), /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 0a783c81aaa9..4ed91acdf627 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -251,6 +251,7 @@ import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; +import org.apache.hadoop.util.ReflectionUtils; import org.apache.hive.common.util.HiveVersionInfo; import org.apache.thrift.TException; import org.apache.thrift.TApplicationException; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java new file mode 100644 index 000000000000..13bd9ec97a99 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.metadata; + +import java.util.concurrent.ConcurrentHashMap; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaHookLoader; +import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.MetaException; + +/** + * Abstract factory that defines an interface for other factories that produce concrete + * MetaStoreClient objects. + * + */ +public interface HiveMetaStoreClientFactory { + + /** + * A method for producing IMetaStoreClient objects. + * + * The implementation returned by this method must throw a MetaException if allowEmbedded = true + * and it does not support embedded mode. + * + * @param conf + * Hive Configuration. + * @param hookLoader + * Hook for handling events related to tables. + * @param allowEmbedded + * Flag indicating the implementation must run in-process, e.g. for unit testing or + * "fast path". + * @param metaCallTimeMap + * A container for storing entry and exit timestamps of IMetaStoreClient method + * invocations. + * @return IMetaStoreClient An implementation of IMetaStoreClient. + * @throws MetaException + */ + IMetaStoreClient createMetaStoreClient(HiveConf conf, HiveMetaHookLoader hookLoader, + boolean allowEmbedded, ConcurrentHashMap metaCallTimeMap) throws MetaException; +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClientFactory.java new file mode 100644 index 000000000000..b7bac5b16b5d --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClientFactory.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.metadata; + +import static com.google.common.base.Preconditions.checkNotNull; +import java.util.concurrent.ConcurrentHashMap; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.metastore.HiveMetaHookLoader; +import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.metastore.RetryingMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.MetaException; + +/** + * Default MetaStoreClientFactory for Hive which produces SessionHiveMetaStoreClient objects. 
+ * + */ +public final class SessionHiveMetaStoreClientFactory implements HiveMetaStoreClientFactory { + + @Override + public IMetaStoreClient createMetaStoreClient(HiveConf conf, HiveMetaHookLoader hookLoader, + boolean allowEmbedded, + ConcurrentHashMap metaCallTimeMap) throws MetaException { + + checkNotNull(conf, "conf cannot be null!"); + checkNotNull(hookLoader, "hookLoader cannot be null!"); + checkNotNull(metaCallTimeMap, "metaCallTimeMap cannot be null!"); + + if (conf.getBoolVar(ConfVars.METASTORE_FASTPATH)) { + return new SessionHiveMetaStoreClient(conf, hookLoader, allowEmbedded); + } else { + return RetryingMetaStoreClient.getProxy(conf, hookLoader, metaCallTimeMap, + SessionHiveMetaStoreClient.class.getName(), allowEmbedded); + } + } +} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index a55551357d21..b9b0d52dbe5d 100755 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -19,6 +19,8 @@ package org.apache.hadoop.hive.ql.metadata; import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME; +import static org.junit.Assert.assertThat; +import static org.hamcrest.CoreMatchers.instanceOf; import java.io.OutputStream; import java.util.ArrayList; @@ -40,7 +42,11 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +<<<<<<< HEAD import org.apache.hadoop.hive.metastore.MetaStoreEventListener; +======= +import org.apache.hadoop.hive.metastore.IMetaStoreClient; +>>>>>>> 1558b1ad12 (HIVE-12679: Allow users to be able to specify an implementation of IMetaStoreClient via HiveConf) import org.apache.hadoop.hive.metastore.PartitionDropOptions; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.Database; @@ -1004,6 +1010,7 @@ public void testHiveRefreshOnConfChange() throws Throwable{ assertTrue(prevHiveObj != newHiveObj); } +<<<<<<< HEAD public void testFireInsertEvent() throws Throwable { Hive hiveDb = Hive.getWithFastCheck(hiveConf, false); String tableName = "test_fire_insert_event"; @@ -1078,6 +1085,44 @@ private String getFileCheckSum(FileSystem fileSystem, Path p) throws Exception { return ""; } +======= + @Test + public void testLoadingHiveMetaStoreClientFactory() throws Throwable { + String factoryClassName = SessionHiveMetaStoreClientFactory.class.getName(); + HiveConf conf = new HiveConf(); + conf.setVar(ConfVars.METASTORE_CLIENT_FACTORY_CLASS, factoryClassName); + // Make sure we instantiate the embedded version + // so the implementation chosen is SessionHiveMetaStoreClient, not a retryable version of it. 
+ conf.setBoolVar(ConfVars.METASTORE_FASTPATH, true); + // The current object was constructed in setUp() before we got here + // so clean that up so we can inject our own dummy implementation of IMetaStoreClient + Hive.closeCurrent(); + Hive hive = Hive.get(conf); + IMetaStoreClient tmp = hive.getMSC(); + assertNotNull("getMSC() failed.", tmp); + assertThat("Invalid default client implementation created.", tmp, + instanceOf(SessionHiveMetaStoreClient.class)); + } + + @Test + public void testLoadingInvalidHiveMetaStoreClientFactory() throws Throwable { + // Intentionally invalid class + String factoryClassName = String.class.getName(); + HiveConf conf = new HiveConf(); + conf.setVar(HiveConf.ConfVars.METASTORE_CLIENT_FACTORY_CLASS, factoryClassName); + // The current object was constructed in setUp() before we got here + // so clean that up so we can inject our own dummy implementation of IMetaStoreClient + Hive.closeCurrent(); + Hive hive = Hive.get(conf); + try { + hive.getMSC(); + fail("getMSC() was expected to throw MetaException."); + } catch (Exception e) { + assertTrue("getMSC() failed, which IS expected.", true); + } + } + +>>>>>>> 1558b1ad12 (HIVE-12679: Allow users to be able to specify an implementation of IMetaStoreClient via HiveConf) // shamelessly copied from Path in hadoop-2 private static final String SEPARATOR = "/"; private static final char SEPARATOR_CHAR = '/'; From 5883c9ad370454f504da57efed98e78db80dfb56 Mon Sep 17 00:00:00 2001 From: okumin Date: Tue, 20 Jun 2023 19:57:51 +0900 Subject: [PATCH 02/12] HIVE-12679: Resolve conflict --- .../test/org/apache/hadoop/hive/ql/metadata/TestHive.java | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index b9b0d52dbe5d..055ed8a52496 100755 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -42,11 +42,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -<<<<<<< HEAD -import org.apache.hadoop.hive.metastore.MetaStoreEventListener; -======= import org.apache.hadoop.hive.metastore.IMetaStoreClient; ->>>>>>> 1558b1ad12 (HIVE-12679: Allow users to be able to specify an implementation of IMetaStoreClient via HiveConf) +import org.apache.hadoop.hive.metastore.MetaStoreEventListener; import org.apache.hadoop.hive.metastore.PartitionDropOptions; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.Database; @@ -1010,7 +1007,6 @@ public void testHiveRefreshOnConfChange() throws Throwable{ assertTrue(prevHiveObj != newHiveObj); } -<<<<<<< HEAD public void testFireInsertEvent() throws Throwable { Hive hiveDb = Hive.getWithFastCheck(hiveConf, false); String tableName = "test_fire_insert_event"; @@ -1085,7 +1081,6 @@ private String getFileCheckSum(FileSystem fileSystem, Path p) throws Exception { return ""; } -======= @Test public void testLoadingHiveMetaStoreClientFactory() throws Throwable { String factoryClassName = SessionHiveMetaStoreClientFactory.class.getName(); @@ -1122,7 +1117,6 @@ public void testLoadingInvalidHiveMetaStoreClientFactory() throws Throwable { } } ->>>>>>> 1558b1ad12 (HIVE-12679: Allow users to be able to specify an implementation of IMetaStoreClient via HiveConf) // shamelessly copied from Path in hadoop-2 private static final String SEPARATOR = "/"; private static 
final char SEPARATOR_CHAR = '/'; From c1324753cf7ee7f05f5ea4fae811c7d08133b020 Mon Sep 17 00:00:00 2001 From: okumin Date: Tue, 20 Jun 2023 21:31:31 +0900 Subject: [PATCH 03/12] HIVE-12679: Follow warning of IDE --- .../hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java index 13bd9ec97a99..0a19e4538791 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java @@ -48,7 +48,7 @@ public interface HiveMetaStoreClientFactory { * A container for storing entry and exit timestamps of IMetaStoreClient method * invocations. * @return IMetaStoreClient An implementation of IMetaStoreClient. - * @throws MetaException + * @throws MetaException if this method fails to create IMetaStoreClient */ IMetaStoreClient createMetaStoreClient(HiveConf conf, HiveMetaHookLoader hookLoader, boolean allowEmbedded, ConcurrentHashMap metaCallTimeMap) throws MetaException; From f145e1d4e104cecfde663ba571ec596518b2c29a Mon Sep 17 00:00:00 2001 From: okumin Date: Wed, 21 Jun 2023 22:03:21 +0900 Subject: [PATCH 04/12] HIVE-12679: Move the parameter to MetastoreConf --- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java | 4 ++-- .../org/apache/hadoop/hive/metastore/conf/MetastoreConf.java | 5 +++++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index 055ed8a52496..0d6894f93e4f 100755 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -1085,7 +1085,7 @@ private String getFileCheckSum(FileSystem fileSystem, Path p) throws Exception { public void testLoadingHiveMetaStoreClientFactory() throws Throwable { String factoryClassName = SessionHiveMetaStoreClientFactory.class.getName(); HiveConf conf = new HiveConf(); - conf.setVar(ConfVars.METASTORE_CLIENT_FACTORY_CLASS, factoryClassName); + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_FACTORY_CLASS, factoryClassName); // Make sure we instantiate the embedded version // so the implementation chosen is SessionHiveMetaStoreClient, not a retryable version of it. 
conf.setBoolVar(ConfVars.METASTORE_FASTPATH, true); @@ -1104,7 +1104,7 @@ public void testLoadingInvalidHiveMetaStoreClientFactory() throws Throwable { // Intentionally invalid class String factoryClassName = String.class.getName(); HiveConf conf = new HiveConf(); - conf.setVar(HiveConf.ConfVars.METASTORE_CLIENT_FACTORY_CLASS, factoryClassName); + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_FACTORY_CLASS, factoryClassName); // The current object was constructed in setUp() before we got here // so clean that up so we can inject our own dummy implementation of IMetaStoreClient Hive.closeCurrent(); diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java index 7911e8ddac20..a257fd21576d 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java @@ -195,6 +195,7 @@ public String toString() { ConfVars.KERBEROS_PRINCIPAL, ConfVars.USE_THRIFT_SASL, ConfVars.METASTORE_CLIENT_AUTH_MODE, + ConfVars.METASTORE_CLIENT_FACTORY_CLASS, ConfVars.METASTORE_CLIENT_PLAIN_USERNAME, ConfVars.CACHE_PINOBJTYPES, ConfVars.CONNECTION_POOLING_TYPE, @@ -1728,6 +1729,10 @@ public enum ConfVars { " and password. Any other value is ignored right now but may be used later." + "If JWT- Supported only in HTTP transport mode. If set, HMS Client will pick the value of JWT from " + "environment variable HMS_JWT and set it in Authorization header in http request"), + METASTORE_CLIENT_FACTORY_CLASS("metastore.client.factory.class", + "hive.metastore.client.factory.class", + "org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClientFactory", + "The name of the factory class that produces objects implementing the IMetaStoreClient interface."), METASTORE_CLIENT_ADDITIONAL_HEADERS("metastore.client.http.additional.headers", "hive.metastore.client.http.additional.headers", "", "Comma separated list of headers which are passed to the metastore service in the http headers"), From 33a5637dda7e2fd3ad5715361c02777239a1c0ef Mon Sep 17 00:00:00 2001 From: okumin Date: Tue, 8 Jul 2025 20:20:37 +0900 Subject: [PATCH 05/12] Remove the param from HiveConf --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 985a776aeb17..aad949d36880 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -912,9 +912,6 @@ public static enum ConfVars { HADOOP_NUM_REDUCERS("mapreduce.job.reduces", -1, "", true), // Metastore stuff. Be sure to update HiveConf.metaVars when you add something here! - METASTORE_CLIENT_FACTORY_CLASS("hive.metastore.client.factory.class", - "org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClientFactory", - "The name of the factory class that produces objects implementing the IMetaStoreClient interface."), METASTORE_DB_TYPE("hive.metastore.db.type", "DERBY", new StringSet("DERBY", "ORACLE", "MYSQL", "MSSQL", "POSTGRES"), "Type of database used by the metastore. 
Information schema & JDBCStorageHandler depend on it."), /** From 7dcf6c0735b01465dbe1da652573a71327f20ee4 Mon Sep 17 00:00:00 2001 From: seonggon Date: Wed, 9 Jul 2025 12:17:21 +0900 Subject: [PATCH 06/12] maybe compile ok --- .../SessionHiveMetaStoreClientFactory.java | 27 ++++++++++++++----- 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClientFactory.java index b7bac5b16b5d..4d88b48c14e6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClientFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClientFactory.java @@ -18,14 +18,20 @@ package org.apache.hadoop.hive.ql.metadata; -import static com.google.common.base.Preconditions.checkNotNull; import java.util.concurrent.ConcurrentHashMap; + +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.HiveMetaHookLoader; import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.metastore.RetryingMetaStoreClient; import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.client.HookEnabledMetaStoreClient; +import org.apache.hadoop.hive.metastore.client.SynchronizedMetaStoreClient; +import org.apache.hadoop.hive.metastore.client.ThriftHiveMetaStoreClient; + +import static com.google.common.base.Preconditions.checkNotNull; /** * Default MetaStoreClientFactory for Hive which produces SessionHiveMetaStoreClient objects. @@ -35,18 +41,27 @@ public final class SessionHiveMetaStoreClientFactory implements HiveMetaStoreCli @Override public IMetaStoreClient createMetaStoreClient(HiveConf conf, HiveMetaHookLoader hookLoader, - boolean allowEmbedded, - ConcurrentHashMap metaCallTimeMap) throws MetaException { + boolean allowEmbedded, ConcurrentHashMap metaCallTimeMap) throws MetaException { checkNotNull(conf, "conf cannot be null!"); checkNotNull(hookLoader, "hookLoader cannot be null!"); checkNotNull(metaCallTimeMap, "metaCallTimeMap cannot be null!"); + IMetaStoreClient thriftClient = ThriftHiveMetaStoreClient.newClient(conf, allowEmbedded); + IMetaStoreClient clientWithLocalCache = HiveMetaStoreClientWithLocalCache.newClient(conf, thriftClient); + IMetaStoreClient sessionLevelClient = SessionHiveMetaStoreClient.newClient(conf, clientWithLocalCache); + IMetaStoreClient clientWithHook = HookEnabledMetaStoreClient.newClient(conf, hookLoader, sessionLevelClient); + if (conf.getBoolVar(ConfVars.METASTORE_FASTPATH)) { - return new SessionHiveMetaStoreClient(conf, hookLoader, allowEmbedded); + return SynchronizedMetaStoreClient.newClient(conf, clientWithHook); } else { - return RetryingMetaStoreClient.getProxy(conf, hookLoader, metaCallTimeMap, - SessionHiveMetaStoreClient.class.getName(), allowEmbedded); + return RetryingMetaStoreClient.getProxy( + conf, + new Class[] {Configuration.class, IMetaStoreClient.class}, + new Object[] {conf, clientWithHook}, + metaCallTimeMap, + SynchronizedMetaStoreClient.class.getName() + ); } } } From 5712a018e76be304ac0009b412be722f892402a9 Mon Sep 17 00:00:00 2001 From: seonggon Date: Wed, 9 Jul 2025 12:18:00 +0900 Subject: [PATCH 07/12] Option1: Use metastore.client.factory.class --- .../apache/hadoop/hive/ql/metadata/Hive.java | 3 +- .../metadata/HiveMetaStoreClientFactory.java | 10 +-- 
.../SessionHiveMetaStoreClientFactory.java | 67 ------------------- .../ThriftHiveMetaStoreClientFactory.java | 37 ++++++++++ .../hadoop/hive/ql/metadata/TestHive.java | 2 +- 5 files changed, 40 insertions(+), 79 deletions(-) delete mode 100644 ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClientFactory.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/metadata/ThriftHiveMetaStoreClientFactory.java diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 4ed91acdf627..41f8dbc93324 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -130,7 +130,6 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.io.HdfsUtils; -import org.apache.hadoop.hive.metastore.HiveMetaException; import org.apache.hadoop.hive.metastore.HiveMetaHook; import org.apache.hadoop.hive.metastore.HiveMetaHookLoader; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; @@ -5986,7 +5985,7 @@ public List exchangeTablePartitions(Map partitionSpec * File based store support is removed * * @returns a Meta Store Client - * @throws HiveMetaException + * @throws MetaException * if a working client can't be created */ @SuppressWarnings("squid:S2095") diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java index 0a19e4538791..c1552aacdbdb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java @@ -18,9 +18,7 @@ package org.apache.hadoop.hive.ql.metadata; -import java.util.concurrent.ConcurrentHashMap; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.HiveMetaHookLoader; import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.metastore.api.MetaException; @@ -39,17 +37,11 @@ public interface HiveMetaStoreClientFactory { * * @param conf * Hive Configuration. - * @param hookLoader - * Hook for handling events related to tables. * @param allowEmbedded * Flag indicating the implementation must run in-process, e.g. for unit testing or * "fast path". - * @param metaCallTimeMap - * A container for storing entry and exit timestamps of IMetaStoreClient method - * invocations. * @return IMetaStoreClient An implementation of IMetaStoreClient. * @throws MetaException if this method fails to create IMetaStoreClient */ - IMetaStoreClient createMetaStoreClient(HiveConf conf, HiveMetaHookLoader hookLoader, - boolean allowEmbedded, ConcurrentHashMap metaCallTimeMap) throws MetaException; + IMetaStoreClient createMetaStoreClient(HiveConf conf, boolean allowEmbedded) throws MetaException; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClientFactory.java deleted file mode 100644 index 4d88b48c14e6..000000000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClientFactory.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.metadata; - -import java.util.concurrent.ConcurrentHashMap; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.metastore.HiveMetaHookLoader; -import org.apache.hadoop.hive.metastore.IMetaStoreClient; -import org.apache.hadoop.hive.metastore.RetryingMetaStoreClient; -import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.client.HookEnabledMetaStoreClient; -import org.apache.hadoop.hive.metastore.client.SynchronizedMetaStoreClient; -import org.apache.hadoop.hive.metastore.client.ThriftHiveMetaStoreClient; - -import static com.google.common.base.Preconditions.checkNotNull; - -/** - * Default MetaStoreClientFactory for Hive which produces SessionHiveMetaStoreClient objects. - * - */ -public final class SessionHiveMetaStoreClientFactory implements HiveMetaStoreClientFactory { - - @Override - public IMetaStoreClient createMetaStoreClient(HiveConf conf, HiveMetaHookLoader hookLoader, - boolean allowEmbedded, ConcurrentHashMap metaCallTimeMap) throws MetaException { - - checkNotNull(conf, "conf cannot be null!"); - checkNotNull(hookLoader, "hookLoader cannot be null!"); - checkNotNull(metaCallTimeMap, "metaCallTimeMap cannot be null!"); - - IMetaStoreClient thriftClient = ThriftHiveMetaStoreClient.newClient(conf, allowEmbedded); - IMetaStoreClient clientWithLocalCache = HiveMetaStoreClientWithLocalCache.newClient(conf, thriftClient); - IMetaStoreClient sessionLevelClient = SessionHiveMetaStoreClient.newClient(conf, clientWithLocalCache); - IMetaStoreClient clientWithHook = HookEnabledMetaStoreClient.newClient(conf, hookLoader, sessionLevelClient); - - if (conf.getBoolVar(ConfVars.METASTORE_FASTPATH)) { - return SynchronizedMetaStoreClient.newClient(conf, clientWithHook); - } else { - return RetryingMetaStoreClient.getProxy( - conf, - new Class[] {Configuration.class, IMetaStoreClient.class}, - new Object[] {conf, clientWithHook}, - metaCallTimeMap, - SynchronizedMetaStoreClient.class.getName() - ); - } - } -} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/ThriftHiveMetaStoreClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/ThriftHiveMetaStoreClientFactory.java new file mode 100644 index 000000000000..ee976eb4cc5c --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/ThriftHiveMetaStoreClientFactory.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.metadata; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.client.ThriftHiveMetaStoreClient; + +/** + * Default MetaStoreClientFactory for Hive which produces ThriftHiveMetaStoreClient objects. + * + */ +public final class ThriftHiveMetaStoreClientFactory implements HiveMetaStoreClientFactory { + + @Override + public IMetaStoreClient createMetaStoreClient(HiveConf conf, boolean allowEmbedded) throws MetaException { + + return ThriftHiveMetaStoreClient.newClient(conf, allowEmbedded); + } +} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index 0d6894f93e4f..f29969b69d3c 100755 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -1083,7 +1083,7 @@ private String getFileCheckSum(FileSystem fileSystem, Path p) throws Exception { @Test public void testLoadingHiveMetaStoreClientFactory() throws Throwable { - String factoryClassName = SessionHiveMetaStoreClientFactory.class.getName(); + String factoryClassName = ThriftHiveMetaStoreClientFactory.class.getName(); HiveConf conf = new HiveConf(); MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_FACTORY_CLASS, factoryClassName); // Make sure we instantiate the embedded version From 156e00f3a38d8e823f2dfeb2b852a62391094781 Mon Sep 17 00:00:00 2001 From: seonggon Date: Wed, 9 Jul 2025 13:15:53 +0900 Subject: [PATCH 08/12] Option 2: Use metastore.client.class --- .../apache/hadoop/hive/ql/metadata/Hive.java | 2 - .../metadata/HiveMetaStoreClientFactory.java | 73 ++++++++++++++----- .../ThriftHiveMetaStoreClientFactory.java | 37 ---------- .../hadoop/hive/ql/metadata/TestHive.java | 18 ++--- .../client/ThriftHiveMetaStoreClient.java | 2 +- .../hive/metastore/conf/MetastoreConf.java | 9 +-- 6 files changed, 65 insertions(+), 76 deletions(-) delete mode 100644 ql/src/java/org/apache/hadoop/hive/ql/metadata/ThriftHiveMetaStoreClientFactory.java diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 41f8dbc93324..58fef48d75ab 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -250,7 +250,6 @@ import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.util.ReflectionUtils; import org.apache.hive.common.util.HiveVersionInfo; import org.apache.thrift.TException; import org.apache.thrift.TApplicationException; @@ -5988,7 +5987,6 @@ public List exchangeTablePartitions(Map partitionSpec * @throws MetaException * if a working client can't be created */ - @SuppressWarnings("squid:S2095") private IMetaStoreClient createMetaStoreClient(boolean allowEmbedded) throws MetaException { HiveMetaHookLoader 
hookLoader = new HiveMetaHookLoader() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java index c1552aacdbdb..5775e46accb5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java @@ -18,30 +18,63 @@ package org.apache.hadoop.hive.ql.metadata; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.ExceptionHandler; +import org.apache.hadoop.hive.metastore.HiveMetaHookLoader; import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.metastore.RetryingMetaStoreClient; import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.client.HookEnabledMetaStoreClient; +import org.apache.hadoop.hive.metastore.client.SynchronizedMetaStoreClient; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.utils.JavaUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.ConcurrentHashMap; /** - * Abstract factory that defines an interface for other factories that produce concrete - * MetaStoreClient objects. - * + * A factory class creating a MetaStoreClient specified in a given configuration. */ -public interface HiveMetaStoreClientFactory { - - /** - * A method for producing IMetaStoreClient objects. - * - * The implementation returned by this method must throw a MetaException if allowEmbedded = true - * and it does not support embedded mode. - * - * @param conf - * Hive Configuration. - * @param allowEmbedded - * Flag indicating the implementation must run in-process, e.g. for unit testing or - * "fast path". - * @return IMetaStoreClient An implementation of IMetaStoreClient. - * @throws MetaException if this method fails to create IMetaStoreClient - */ - IMetaStoreClient createMetaStoreClient(HiveConf conf, boolean allowEmbedded) throws MetaException; +public class HiveMetaStoreClientFactory { + private static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreClientFactory.class); + + public static IMetaStoreClient newClient(HiveConf conf, HiveMetaHookLoader hookLoader, + boolean allowEmbedded, ConcurrentHashMap metaCallTimeMap) throws MetaException { + String mscClassName = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_CLASS); + LOG.info("Using {} as a base MetaStoreClient", mscClassName); + Class mscClass = JavaUtils.getClass(mscClassName, IMetaStoreClient.class); + + IMetaStoreClient baseMetaStoreClient = null; + try { + baseMetaStoreClient = JavaUtils.newInstance(mscClass, + new Class[]{Configuration.class, boolean.class}, + new Object[]{conf, allowEmbedded}); + } catch (Throwable t) { + // Reflection by JavaUtils will throw RuntimeException, try to get real MetaException here. 
+ Throwable rootCause = ExceptionUtils.getRootCause(t); + if (rootCause instanceof Exception) { + throw ExceptionHandler.newMetaException((Exception) rootCause); + } + throw t; + } + + IMetaStoreClient clientWithLocalCache = HiveMetaStoreClientWithLocalCache.newClient(conf, baseMetaStoreClient); + IMetaStoreClient sessionLevelClient = SessionHiveMetaStoreClient.newClient(conf, clientWithLocalCache); + IMetaStoreClient clientWithHook = HookEnabledMetaStoreClient.newClient(conf, hookLoader, sessionLevelClient); + + if (conf.getBoolVar(HiveConf.ConfVars.METASTORE_FASTPATH)) { + return SynchronizedMetaStoreClient.newClient(conf, clientWithHook); + } else { + return RetryingMetaStoreClient.getProxy( + conf, + new Class[] {Configuration.class, IMetaStoreClient.class}, + new Object[] {conf, clientWithHook}, + metaCallTimeMap, + SynchronizedMetaStoreClient.class.getName() + ); + } + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/ThriftHiveMetaStoreClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/ThriftHiveMetaStoreClientFactory.java deleted file mode 100644 index ee976eb4cc5c..000000000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/ThriftHiveMetaStoreClientFactory.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.metadata; - -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.IMetaStoreClient; -import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.client.ThriftHiveMetaStoreClient; - -/** - * Default MetaStoreClientFactory for Hive which produces ThriftHiveMetaStoreClient objects. 
- * - */ -public final class ThriftHiveMetaStoreClientFactory implements HiveMetaStoreClientFactory { - - @Override - public IMetaStoreClient createMetaStoreClient(HiveConf conf, boolean allowEmbedded) throws MetaException { - - return ThriftHiveMetaStoreClient.newClient(conf, allowEmbedded); - } -} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index f29969b69d3c..ebdacb6d3c50 100755 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -54,6 +54,7 @@ import org.apache.hadoop.hive.metastore.api.WMResourcePlan; import org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; +import org.apache.hadoop.hive.metastore.client.ThriftHiveMetaStoreClient; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.events.InsertEvent; import org.apache.hadoop.hive.ql.ddl.table.partition.PartitionUtils; @@ -1082,29 +1083,24 @@ private String getFileCheckSum(FileSystem fileSystem, Path p) throws Exception { } @Test - public void testLoadingHiveMetaStoreClientFactory() throws Throwable { - String factoryClassName = ThriftHiveMetaStoreClientFactory.class.getName(); + public void testLoadingIMetaStoreClient() throws Throwable { + String clientClassName = ThriftHiveMetaStoreClient.class.getName(); HiveConf conf = new HiveConf(); - MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_FACTORY_CLASS, factoryClassName); - // Make sure we instantiate the embedded version - // so the implementation chosen is SessionHiveMetaStoreClient, not a retryable version of it. - conf.setBoolVar(ConfVars.METASTORE_FASTPATH, true); + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_CLASS, clientClassName); // The current object was constructed in setUp() before we got here // so clean that up so we can inject our own dummy implementation of IMetaStoreClient Hive.closeCurrent(); Hive hive = Hive.get(conf); IMetaStoreClient tmp = hive.getMSC(); assertNotNull("getMSC() failed.", tmp); - assertThat("Invalid default client implementation created.", tmp, - instanceOf(SessionHiveMetaStoreClient.class)); } @Test - public void testLoadingInvalidHiveMetaStoreClientFactory() throws Throwable { + public void testLoadingInvalidIMetaStoreClient() throws Throwable { // Intentionally invalid class - String factoryClassName = String.class.getName(); + String clientClassName = String.class.getName(); HiveConf conf = new HiveConf(); - MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_FACTORY_CLASS, factoryClassName); + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_CLASS, clientClassName); // The current object was constructed in setUp() before we got here // so clean that up so we can inject our own dummy implementation of IMetaStoreClient Hive.closeCurrent(); diff --git a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/ThriftHiveMetaStoreClient.java b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/ThriftHiveMetaStoreClient.java index 3036b9889ef9..af5aabc7ff82 100644 --- a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/ThriftHiveMetaStoreClient.java +++ 
b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/ThriftHiveMetaStoreClient.java @@ -160,7 +160,7 @@ public static ThriftHiveMetaStoreClient newClient(Configuration conf, Boolean al return new ThriftHiveMetaStoreClient(conf, allowEmbedded); } - public ThriftHiveMetaStoreClient(Configuration conf, Boolean allowEmbedded) throws MetaException { + public ThriftHiveMetaStoreClient(Configuration conf, boolean allowEmbedded) throws MetaException { super(conf); version = MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST) ? TEST_VERSION : DEFAULT_VERSION; diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java index a257fd21576d..87a2417673c7 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java @@ -195,7 +195,6 @@ public String toString() { ConfVars.KERBEROS_PRINCIPAL, ConfVars.USE_THRIFT_SASL, ConfVars.METASTORE_CLIENT_AUTH_MODE, - ConfVars.METASTORE_CLIENT_FACTORY_CLASS, ConfVars.METASTORE_CLIENT_PLAIN_USERNAME, ConfVars.CACHE_PINOBJTYPES, ConfVars.CONNECTION_POOLING_TYPE, @@ -1729,10 +1728,10 @@ public enum ConfVars { " and password. Any other value is ignored right now but may be used later." + "If JWT- Supported only in HTTP transport mode. If set, HMS Client will pick the value of JWT from " + "environment variable HMS_JWT and set it in Authorization header in http request"), - METASTORE_CLIENT_FACTORY_CLASS("metastore.client.factory.class", - "hive.metastore.client.factory.class", - "org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClientFactory", - "The name of the factory class that produces objects implementing the IMetaStoreClient interface."), + METASTORE_CLIENT_CLASS("metastore.client.class", + "hive.metastore.client.class", + "org.apache.hadoop.hive.metastore.client.ThriftHiveMetaStoreClient", + "The name of MetaStoreClient class that implements the IMetaStoreClient interface."), METASTORE_CLIENT_ADDITIONAL_HEADERS("metastore.client.http.additional.headers", "hive.metastore.client.http.additional.headers", "", "Comma separated list of headers which are passed to the metastore service in the http headers"), From 004f13e0211d44952b94160c6750c537b4028062 Mon Sep 17 00:00:00 2001 From: seonggon Date: Thu, 10 Jul 2025 19:42:45 +0900 Subject: [PATCH 09/12] Move HiveMetaStoreClientFactory from hive-exec to standalone-metastore-common --- .../apache/hadoop/hive/ql/metadata/Hive.java | 4 + .../hive/metastore/HiveMetaStoreClient.java | 107 ++++++++++++++---- .../client}/HiveMetaStoreClientFactory.java | 36 ++---- 3 files changed, 97 insertions(+), 50 deletions(-) rename {ql/src/java/org/apache/hadoop/hive/ql/metadata => standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/client}/HiveMetaStoreClientFactory.java (57%) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 58fef48d75ab..21bfad912023 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -125,6 +125,7 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.Batchable; 
import org.apache.hadoop.hive.metastore.client.builder.HiveMetaStoreClientBuilder; +import org.apache.hadoop.hive.metastore.client.HiveMetaStoreClientFactory; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.utils.RetryUtilities; import org.apache.hadoop.hive.ql.Context; @@ -136,6 +137,7 @@ import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils; import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.metastore.PartitionDropOptions; +import org.apache.hadoop.hive.metastore.RetryingMetaStoreClient; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.AggrStats; @@ -5987,6 +5989,7 @@ public List exchangeTablePartitions(Map partitionSpec * @throws MetaException * if a working client can't be created */ + @SuppressWarnings("squid:S2095") private IMetaStoreClient createMetaStoreClient(boolean allowEmbedded) throws MetaException { HiveMetaHookLoader hookLoader = new HiveMetaHookLoader() { @@ -5999,6 +6002,7 @@ public HiveMetaHook getHook( } }; + // SG:FIXME HiveMetaStoreClientBuilder msClientBuilder = new HiveMetaStoreClientBuilder(conf) .newThriftClient(allowEmbedded) .enhanceWith(client -> diff --git a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index 3bc5993e5371..2913db8dda8a 100644 --- a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.Type; +import org.apache.hadoop.hive.metastore.client.HiveMetaStoreClientFactory; import org.apache.hadoop.hive.metastore.client.MetaStoreClientWrapper; import org.apache.hadoop.hive.metastore.client.ThriftHiveMetaStoreClient; import org.apache.hadoop.hive.metastore.client.builder.HiveMetaStoreClientBuilder; @@ -61,17 +62,22 @@ public HiveMetaStoreClient(Configuration conf, HiveMetaHookLoader hookLoader) th public HiveMetaStoreClient(Configuration conf, HiveMetaHookLoader hookLoader, Boolean allowEmbedded) throws MetaException { - this(conf, hookLoader, new ThriftHiveMetaStoreClient(conf, allowEmbedded)); + this(conf, hookLoader, HiveMetaStoreClientFactory.newClient(conf, allowEmbedded)); } private HiveMetaStoreClient(Configuration conf, HiveMetaHookLoader hookLoader, - ThriftHiveMetaStoreClient thriftClient) { - super(createUnderlyingClient(conf, hookLoader, thriftClient), conf); - this.thriftClient = thriftClient; + IMetaStoreClient baseMetaStoreClient) { + super(createUnderlyingClient(conf, hookLoader, baseMetaStoreClient), conf); + + if (baseMetaStoreClient instanceof ThriftHiveMetaStoreClient) { + this.thriftClient = (ThriftHiveMetaStoreClient) baseMetaStoreClient; + } else { + this.thriftClient = null; + } } private static IMetaStoreClient createUnderlyingClient(Configuration conf, HiveMetaHookLoader hookLoader, - ThriftHiveMetaStoreClient thriftClient) { + IMetaStoreClient thriftClient) { return new HiveMetaStoreClientBuilder(conf) .client(thriftClient) .withHooks(hookLoader) @@ -82,19 +88,35 @@ private static IMetaStoreClient createUnderlyingClient(Configuration 
conf, HiveM // methods for test public boolean createType(Type type) throws TException { - return thriftClient.createType(type); + if (thriftClient != null) { + return thriftClient.createType(type); + } else { + throw new UnsupportedOperationException(); + } } public boolean dropType(String type) throws TException { - return thriftClient.dropType(type); + if (thriftClient != null) { + return thriftClient.dropType(type); + } else { + throw new UnsupportedOperationException(); + } } public Type getType(String name) throws TException { - return thriftClient.getType(name); + if (thriftClient != null) { + return thriftClient.getType(name); + } else { + throw new UnsupportedOperationException(); + } } public Map getTypeAll(String name) throws TException { - return thriftClient.getTypeAll(name); + if (thriftClient != null) { + return thriftClient.getTypeAll(name); + } else { + throw new UnsupportedOperationException(); + } } public void createTable(Table tbl, EnvironmentContext envContext) throws TException { @@ -107,56 +129,99 @@ public void createTable(Table tbl, EnvironmentContext envContext) throws TExcept public Table getTable(String catName, String dbName, String tableName, boolean getColumnStats, String engine) throws TException { - return thriftClient.getTable(catName, dbName, tableName, getColumnStats, engine); + if (thriftClient != null) { + return thriftClient.getTable(catName, dbName, tableName, getColumnStats, engine); + } else { + throw new UnsupportedOperationException(); + } } public void dropTable(String catName, String dbname, String name, boolean deleteData, boolean ignoreUnknownTab, EnvironmentContext envContext) throws TException { - thriftClient.dropTable(catName, dbname, name, deleteData, ignoreUnknownTab, envContext); + if (thriftClient != null) { + thriftClient.dropTable(catName, dbname, name, deleteData, ignoreUnknownTab, envContext); + } else { + throw new UnsupportedOperationException(); + } } public Partition add_partition(Partition new_part, EnvironmentContext envContext) throws TException { - return thriftClient.add_partition(new_part, envContext); + if (thriftClient != null) { + return thriftClient.add_partition(new_part, envContext); + } else { + throw new UnsupportedOperationException(); + } } public Partition appendPartition(String dbName, String tableName, List partVals, EnvironmentContext ec) throws TException { - return thriftClient.appendPartition(dbName, tableName, partVals, ec); + if (thriftClient != null) { + return thriftClient.appendPartition(dbName, tableName, partVals, ec); + } else { + throw new UnsupportedOperationException(); + } } public Partition appendPartitionByName(String dbName, String tableName, String partName) throws TException { - return thriftClient.appendPartitionByName(dbName, tableName, partName); + if (thriftClient != null) { + return thriftClient.appendPartitionByName(dbName, tableName, partName); + } else { + throw new UnsupportedOperationException(); + } } public Partition appendPartitionByName(String dbName, String tableName, String partName, EnvironmentContext envContext) throws TException { - return thriftClient.appendPartitionByName(dbName, tableName, partName, envContext); + if (thriftClient != null) { + return thriftClient.appendPartitionByName(dbName, tableName, partName, envContext); + } else { + throw new UnsupportedOperationException(); + } } public boolean dropPartition(String db_name, String tbl_name, List part_vals, EnvironmentContext env_context) throws TException { - return thriftClient.dropPartition(db_name, 
tbl_name, part_vals, env_context); + if (thriftClient != null) { + return thriftClient.dropPartition(db_name, tbl_name, part_vals, env_context); + } else { + throw new UnsupportedOperationException(); + } } public boolean dropPartition(String dbName, String tableName, String partName, boolean dropData, EnvironmentContext ec) throws TException { - return thriftClient.dropPartition(dbName, tableName, partName, dropData, ec); + if (thriftClient != null) { + return thriftClient.dropPartition(dbName, tableName, partName, dropData, ec); + } else { + throw new UnsupportedOperationException(); + } } public boolean dropPartition(String dbName, String tableName, List partVals) throws TException { - return thriftClient.dropPartition(dbName, tableName, partVals); + if (thriftClient != null) { + return thriftClient.dropPartition(dbName, tableName, partVals); + } else { + throw new UnsupportedOperationException(); + } } public boolean dropPartitionByName(String dbName, String tableName, String partName, boolean deleteData) throws TException { - return thriftClient.dropPartitionByName(dbName, tableName, partName, deleteData); + if (thriftClient != null) { + return thriftClient.dropPartitionByName(dbName, tableName, partName, deleteData); + } else { + throw new UnsupportedOperationException(); + } } public boolean dropPartitionByName(String dbName, String tableName, String partName, boolean deleteData, EnvironmentContext envContext) throws TException { - return thriftClient.dropPartitionByName(dbName, tableName, partName, deleteData, envContext); - + if (thriftClient != null) { + return thriftClient.dropPartitionByName(dbName, tableName, partName, deleteData, envContext); + } else { + throw new UnsupportedOperationException(); + } } @VisibleForTesting diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/client/HiveMetaStoreClientFactory.java similarity index 57% rename from ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/client/HiveMetaStoreClientFactory.java index 5775e46accb5..e38f9836f172 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMetaStoreClientFactory.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/client/HiveMetaStoreClientFactory.java @@ -16,33 +16,24 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.metadata; +package org.apache.hadoop.hive.metastore.client; import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.ExceptionHandler; -import org.apache.hadoop.hive.metastore.HiveMetaHookLoader; import org.apache.hadoop.hive.metastore.IMetaStoreClient; -import org.apache.hadoop.hive.metastore.RetryingMetaStoreClient; import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.client.HookEnabledMetaStoreClient; -import org.apache.hadoop.hive.metastore.client.SynchronizedMetaStoreClient; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.utils.JavaUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.concurrent.ConcurrentHashMap; - /** * A factory class creating a MetaStoreClient specified in a given configuration. 
*/ public class HiveMetaStoreClientFactory { private static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreClientFactory.class); - public static IMetaStoreClient newClient(HiveConf conf, HiveMetaHookLoader hookLoader, - boolean allowEmbedded, ConcurrentHashMap metaCallTimeMap) throws MetaException { + public static IMetaStoreClient newClient(Configuration conf, boolean allowEmbedded) throws MetaException { String mscClassName = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_CLASS); LOG.info("Using {} as a base MetaStoreClient", mscClassName); Class mscClass = JavaUtils.getClass(mscClassName, IMetaStoreClient.class); @@ -55,26 +46,13 @@ public static IMetaStoreClient newClient(HiveConf conf, HiveMetaHookLoader hookL } catch (Throwable t) { // Reflection by JavaUtils will throw RuntimeException, try to get real MetaException here. Throwable rootCause = ExceptionUtils.getRootCause(t); - if (rootCause instanceof Exception) { - throw ExceptionHandler.newMetaException((Exception) rootCause); + if (rootCause instanceof MetaException) { + throw (MetaException) rootCause; + } else { + throw new MetaException(rootCause.getMessage()); } - throw t; } - IMetaStoreClient clientWithLocalCache = HiveMetaStoreClientWithLocalCache.newClient(conf, baseMetaStoreClient); - IMetaStoreClient sessionLevelClient = SessionHiveMetaStoreClient.newClient(conf, clientWithLocalCache); - IMetaStoreClient clientWithHook = HookEnabledMetaStoreClient.newClient(conf, hookLoader, sessionLevelClient); - - if (conf.getBoolVar(HiveConf.ConfVars.METASTORE_FASTPATH)) { - return SynchronizedMetaStoreClient.newClient(conf, clientWithHook); - } else { - return RetryingMetaStoreClient.getProxy( - conf, - new Class[] {Configuration.class, IMetaStoreClient.class}, - new Object[] {conf, clientWithHook}, - metaCallTimeMap, - SynchronizedMetaStoreClient.class.getName() - ); - } + return baseMetaStoreClient; } } From 01421d25d7dae0b906913eb9d08ae096ea680cbc Mon Sep 17 00:00:00 2001 From: seonggon Date: Fri, 11 Jul 2025 10:25:20 +0900 Subject: [PATCH 10/12] minor --- .../org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index 2913db8dda8a..62535da87feb 100644 --- a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -77,9 +77,9 @@ private HiveMetaStoreClient(Configuration conf, HiveMetaHookLoader hookLoader, } private static IMetaStoreClient createUnderlyingClient(Configuration conf, HiveMetaHookLoader hookLoader, - IMetaStoreClient thriftClient) { + IMetaStoreClient baseMetaStoreClient) { return new HiveMetaStoreClientBuilder(conf) - .client(thriftClient) + .client(baseMetaStoreClient) .withHooks(hookLoader) .threadSafe() .build(); From fe11b25b20774e2ef30e7709d2b085e9e5addd25 Mon Sep 17 00:00:00 2001 From: seonggon Date: Wed, 16 Jul 2025 14:31:04 +0900 Subject: [PATCH 11/12] compile ok --- .../apache/hadoop/hive/ql/metadata/Hive.java | 173 +++++++++--------- .../hive/metastore/HiveMetaStoreClient.java | 3 +- .../client/ThriftHiveMetaStoreClient.java | 5 - .../builder/HiveMetaStoreClientBuilder.java | 96 ++++++---- 
.../client/HiveMetaStoreClientFactory.java | 58 ------ 5 files changed, 144 insertions(+), 191 deletions(-) delete mode 100644 standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/client/HiveMetaStoreClientFactory.java diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 21bfad912023..eec284a93274 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -21,68 +21,13 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Splitter; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; - -import static org.apache.hadoop.hive.common.AcidConstants.SOFT_DELETE_TABLE; -import static org.apache.hadoop.hive.conf.Constants.MATERIALIZED_VIEW_REWRITING_TIME_WINDOW; -import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_LOAD_DYNAMIC_PARTITIONS_SCAN_SPECIFIC_PARTITIONS; -import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_WRITE_NOTIFICATION_MAX_BATCH_SIZE; -import static org.apache.hadoop.hive.metastore.HiveMetaHook.HIVE_ICEBERG_STORAGE_HANDLER; -import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.CTAS_LEGACY_CONFIG; -import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE; -import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.convertToGetPartitionsByNamesRequest; -import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog; -import static org.apache.hadoop.hive.ql.ddl.DDLUtils.isIcebergStatsSource; -import static org.apache.hadoop.hive.ql.ddl.DDLUtils.isIcebergTable; -import static org.apache.hadoop.hive.ql.io.AcidUtils.getFullTableName; -import static org.apache.hadoop.hive.ql.metadata.RewriteAlgorithm.CALCITE; -import static org.apache.hadoop.hive.ql.metadata.RewriteAlgorithm.ALL; -import static org.apache.hadoop.hive.ql.optimizer.calcite.rules.views.HiveMaterializedViewUtils.extractTable; -import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT; -import static org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME; - -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.PrintStream; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.BitSet; -import java.util.Collections; -import java.util.EnumSet; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.concurrent.Callable; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.Set; -import java.util.function.Supplier; -import 
java.util.stream.Collectors; - -import javax.annotation.Nullable; -import javax.jdo.JDODataStoreException; - -import com.google.common.collect.ImmutableList; - import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.io.FilenameUtils; import org.apache.commons.lang3.ObjectUtils; @@ -96,6 +41,7 @@ import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.hdfs.DFSUtilClient; import org.apache.hadoop.hdfs.DistributedFileSystem; +import org.apache.hadoop.hive.common.DataCopyStatistics; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.HiveStatsUtils; import org.apache.hadoop.hive.common.MaterializationSnapshot; @@ -104,42 +50,26 @@ import org.apache.hadoop.hive.common.ValidReaderWriteIdList; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidWriteIdList; -import org.apache.hadoop.hive.common.DataCopyStatistics; import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate; import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable; import org.apache.hadoop.hive.common.log.InPlaceUpdate; import org.apache.hadoop.hive.common.type.SnapshotContext; import org.apache.hadoop.hive.conf.Constants; -import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.metastore.api.AbortTxnsRequest; -import org.apache.hadoop.hive.metastore.api.CompactionRequest; -import org.apache.hadoop.hive.metastore.api.CreateTableRequest; -import org.apache.hadoop.hive.metastore.api.GetFunctionsRequest; -import org.apache.hadoop.hive.metastore.api.GetPartitionsByNamesRequest; -import org.apache.hadoop.hive.metastore.api.GetPartitionsRequest; -import org.apache.hadoop.hive.metastore.api.GetPartitionsResponse; -import org.apache.hadoop.hive.metastore.api.GetTableRequest; -import org.apache.hadoop.hive.metastore.api.SourceTable; -import org.apache.hadoop.hive.metastore.api.UpdateTransactionalStatsRequest; -import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Batchable; -import org.apache.hadoop.hive.metastore.client.builder.HiveMetaStoreClientBuilder; -import org.apache.hadoop.hive.metastore.client.HiveMetaStoreClientFactory; -import org.apache.hadoop.hive.metastore.conf.MetastoreConf; -import org.apache.hadoop.hive.metastore.utils.RetryUtilities; -import org.apache.hadoop.hive.ql.Context; -import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; -import org.apache.hadoop.hive.ql.io.HdfsUtils; import org.apache.hadoop.hive.metastore.HiveMetaHook; import org.apache.hadoop.hive.metastore.HiveMetaHookLoader; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils; import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.metastore.PartitionDropOptions; -import org.apache.hadoop.hive.metastore.RetryingMetaStoreClient; +import org.apache.hadoop.hive.metastore.ReplChangeManager; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.Warehouse; +import org.apache.hadoop.hive.metastore.api.AbortCompactResponse; +import org.apache.hadoop.hive.metastore.api.AbortCompactionRequest; +import org.apache.hadoop.hive.metastore.api.AbortTxnsRequest; import org.apache.hadoop.hive.metastore.api.AggrStats; import org.apache.hadoop.hive.metastore.api.AllTableConstraintsRequest; import 
org.apache.hadoop.hive.metastore.api.AlreadyExistsException; @@ -149,8 +79,10 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatistics; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; +import org.apache.hadoop.hive.metastore.api.CompactionRequest; import org.apache.hadoop.hive.metastore.api.CompactionResponse; import org.apache.hadoop.hive.metastore.api.CompactionType; +import org.apache.hadoop.hive.metastore.api.CreateTableRequest; import org.apache.hadoop.hive.metastore.api.DataConnector; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.DefaultConstraintsRequest; @@ -161,17 +93,21 @@ import org.apache.hadoop.hive.metastore.api.FireEventRequestData; import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest; import org.apache.hadoop.hive.metastore.api.Function; +import org.apache.hadoop.hive.metastore.api.GetFunctionsRequest; import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse; import org.apache.hadoop.hive.metastore.api.GetPartitionNamesPsRequest; import org.apache.hadoop.hive.metastore.api.GetPartitionNamesPsResponse; import org.apache.hadoop.hive.metastore.api.GetPartitionRequest; -import org.apache.hadoop.hive.metastore.api.GetPartitionsFilterSpec; import org.apache.hadoop.hive.metastore.api.GetPartitionResponse; +import org.apache.hadoop.hive.metastore.api.GetPartitionsByNamesRequest; +import org.apache.hadoop.hive.metastore.api.GetPartitionsFilterSpec; import org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthRequest; import org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthResponse; +import org.apache.hadoop.hive.metastore.api.GetPartitionsRequest; +import org.apache.hadoop.hive.metastore.api.GetPartitionsResponse; import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest; import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalResponse; -import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; +import org.apache.hadoop.hive.metastore.api.GetTableRequest; import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; import org.apache.hadoop.hive.metastore.api.HiveObjectRef; import org.apache.hadoop.hive.metastore.api.HiveObjectType; @@ -180,6 +116,7 @@ import org.apache.hadoop.hive.metastore.api.Materialization; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.MetadataPpdResult; +import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest; import org.apache.hadoop.hive.metastore.api.PartitionFilterMode; import org.apache.hadoop.hive.metastore.api.PartitionSpec; @@ -202,7 +139,9 @@ import org.apache.hadoop.hive.metastore.api.ShowCompactRequest; import org.apache.hadoop.hive.metastore.api.ShowCompactResponse; import org.apache.hadoop.hive.metastore.api.SkewedInfo; +import org.apache.hadoop.hive.metastore.api.SourceTable; import org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest; +import org.apache.hadoop.hive.metastore.api.UpdateTransactionalStatsRequest; import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan; import org.apache.hadoop.hive.metastore.api.WMMapping; import org.apache.hadoop.hive.metastore.api.WMNullablePool; @@ -213,21 +152,25 @@ import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse; import org.apache.hadoop.hive.metastore.api.WriteNotificationLogBatchRequest; import 
org.apache.hadoop.hive.metastore.api.WriteNotificationLogRequest; -import org.apache.hadoop.hive.metastore.api.AbortCompactionRequest; -import org.apache.hadoop.hive.metastore.api.AbortCompactResponse; -import org.apache.hadoop.hive.metastore.ReplChangeManager; +import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; +import org.apache.hadoop.hive.metastore.client.builder.HiveMetaStoreClientBuilder; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils; import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils; +import org.apache.hadoop.hive.metastore.utils.RetryUtilities; +import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.ddl.database.drop.DropDatabaseDesc; +import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.FunctionUtils; import org.apache.hadoop.hive.ql.exec.SerializationUtilities; -import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.Utilities.PartitionDetails; -import org.apache.hadoop.hive.ql.io.AcidUtils; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.AcidUtils.TableSnapshot; +import org.apache.hadoop.hive.ql.io.AcidUtils; +import org.apache.hadoop.hive.ql.io.HdfsUtils; import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager; import org.apache.hadoop.hive.ql.lockmgr.LockException; @@ -235,13 +178,13 @@ import org.apache.hadoop.hive.ql.optimizer.calcite.rules.views.HiveMaterializedViewUtils; import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils; import org.apache.hadoop.hive.ql.parse.ASTNode; -import org.apache.hadoop.hive.ql.parse.AlterTableSnapshotRefSpec; import org.apache.hadoop.hive.ql.parse.AlterTableExecuteSpec; +import org.apache.hadoop.hive.ql.parse.AlterTableSnapshotRefSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; -import org.apache.hadoop.hive.ql.plan.LoadTableDesc; import org.apache.hadoop.hive.ql.plan.LoadTableDesc.LoadFileType; +import org.apache.hadoop.hive.ql.plan.LoadTableDesc; import org.apache.hadoop.hive.ql.session.CreateTableAutomaticGrant; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde2.Deserializer; @@ -253,11 +196,63 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; import org.apache.hive.common.util.HiveVersionInfo; -import org.apache.thrift.TException; import org.apache.thrift.TApplicationException; +import org.apache.thrift.TException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.PrintStream; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.BitSet; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map.Entry; +import java.util.Map; +import 
java.util.Optional; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import javax.annotation.Nullable; +import javax.jdo.JDODataStoreException; + +import static org.apache.hadoop.hive.common.AcidConstants.SOFT_DELETE_TABLE; +import static org.apache.hadoop.hive.conf.Constants.MATERIALIZED_VIEW_REWRITING_TIME_WINDOW; +import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_LOAD_DYNAMIC_PARTITIONS_SCAN_SPECIFIC_PARTITIONS; +import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_WRITE_NOTIFICATION_MAX_BATCH_SIZE; +import static org.apache.hadoop.hive.metastore.HiveMetaHook.HIVE_ICEBERG_STORAGE_HANDLER; +import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.CTAS_LEGACY_CONFIG; +import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE; +import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.convertToGetPartitionsByNamesRequest; +import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog; +import static org.apache.hadoop.hive.ql.ddl.DDLUtils.isIcebergStatsSource; +import static org.apache.hadoop.hive.ql.ddl.DDLUtils.isIcebergTable; +import static org.apache.hadoop.hive.ql.io.AcidUtils.getFullTableName; +import static org.apache.hadoop.hive.ql.metadata.RewriteAlgorithm.ALL; +import static org.apache.hadoop.hive.ql.metadata.RewriteAlgorithm.CALCITE; +import static org.apache.hadoop.hive.ql.optimizer.calcite.rules.views.HiveMaterializedViewUtils.extractTable; +import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT; +import static org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME; + /** * This class has functions that implement meta data/DDL operations using calls * to the metastore. 
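
The next hunk in this file swaps the deleted HiveMetaStoreClientFactory call for a decorator chain assembled by HiveMetaStoreClientBuilder. A minimal sketch of the layering this enables is shown below; it is illustrative rather than the exact replacement code, and it assumes that enhanceWith accepts a Function from IMetaStoreClient to IMetaStoreClient, that the wrapper factories keep their newClient(conf, delegate) form seen elsewhere in this series, and that the call-timing map is keyed by method name.

    import java.util.concurrent.ConcurrentHashMap;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
    import org.apache.hadoop.hive.metastore.IMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.metastore.client.builder.HiveMetaStoreClientBuilder;

    // Hypothetical helper, assumed to live in org.apache.hadoop.hive.ql.metadata
    // (the same package as Hive.java), so the two ql-side wrapper classes below
    // need no imports. The Long value type of the timing map is an assumption.
    final class MetaStoreClientComposition {
      static IMetaStoreClient buildSessionClient(Configuration conf, HiveMetaHookLoader hookLoader,
          boolean allowEmbedded, ConcurrentHashMap<String, Long> metaCallTimeMap) throws MetaException {
        return new HiveMetaStoreClientBuilder(conf)
            // reflective base client: ThriftHiveMetaStoreClient by default, or the
            // class named by the metastore client config
            .newClient(allowEmbedded)
            // metadata caching layer, then session-level handling (temp tables)
            .enhanceWith(c -> HiveMetaStoreClientWithLocalCache.newClient(conf, c))
            .enhanceWith(c -> SessionHiveMetaStoreClient.newClient(conf, c))
            // HiveMetaHook dispatch around table DDL calls
            .withHooks(hookLoader)
            // RetryingMetaStoreClient proxy that records per-method call times
            .withRetry(metaCallTimeMap)
            .build();
      }
    }

The composition order mirrors the old ql-side factory (base Thrift client, local cache, session client, hook handling, retry), only expressed as builder steps instead of nested constructor calls.
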
@@ -5989,7 +5984,6 @@ public List exchangeTablePartitions(Map partitionSpec * @throws MetaException * if a working client can't be created */ - @SuppressWarnings("squid:S2095") private IMetaStoreClient createMetaStoreClient(boolean allowEmbedded) throws MetaException { HiveMetaHookLoader hookLoader = new HiveMetaHookLoader() { @@ -6002,9 +5996,8 @@ public HiveMetaHook getHook( } }; - // SG:FIXME HiveMetaStoreClientBuilder msClientBuilder = new HiveMetaStoreClientBuilder(conf) - .newThriftClient(allowEmbedded) + .newClient(allowEmbedded) .enhanceWith(client -> HiveMetaStoreClientWithLocalCache.newClient(conf, client)) .enhanceWith(client -> diff --git a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index 62535da87feb..2f2cb19d2066 100644 --- a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -28,7 +28,6 @@ import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.Type; -import org.apache.hadoop.hive.metastore.client.HiveMetaStoreClientFactory; import org.apache.hadoop.hive.metastore.client.MetaStoreClientWrapper; import org.apache.hadoop.hive.metastore.client.ThriftHiveMetaStoreClient; import org.apache.hadoop.hive.metastore.client.builder.HiveMetaStoreClientBuilder; @@ -62,7 +61,7 @@ public HiveMetaStoreClient(Configuration conf, HiveMetaHookLoader hookLoader) th public HiveMetaStoreClient(Configuration conf, HiveMetaHookLoader hookLoader, Boolean allowEmbedded) throws MetaException { - this(conf, hookLoader, HiveMetaStoreClientFactory.newClient(conf, allowEmbedded)); + this(conf, hookLoader, HiveMetaStoreClientBuilder.createClient(conf, allowEmbedded)); } private HiveMetaStoreClient(Configuration conf, HiveMetaHookLoader hookLoader, diff --git a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/ThriftHiveMetaStoreClient.java b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/ThriftHiveMetaStoreClient.java index af5aabc7ff82..a2aafd0f3da3 100644 --- a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/ThriftHiveMetaStoreClient.java +++ b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/ThriftHiveMetaStoreClient.java @@ -155,11 +155,6 @@ public class ThriftHiveMetaStoreClient extends BaseMetaStoreClient { private static final Logger LOG = LoggerFactory.getLogger(ThriftHiveMetaStoreClient.class); - public static ThriftHiveMetaStoreClient newClient(Configuration conf, Boolean allowEmbedded) - throws MetaException { - return new ThriftHiveMetaStoreClient(conf, allowEmbedded); - } - public ThriftHiveMetaStoreClient(Configuration conf, boolean allowEmbedded) throws MetaException { super(conf); version = diff --git a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/builder/HiveMetaStoreClientBuilder.java b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/builder/HiveMetaStoreClientBuilder.java index ab213e3ea6d9..b08f0506106d 100644 --- 
a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/builder/HiveMetaStoreClientBuilder.java +++ b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/builder/HiveMetaStoreClientBuilder.java @@ -18,65 +18,89 @@ package org.apache.hadoop.hive.metastore.client.builder; +import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.HiveMetaHookLoader; -import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.metastore.RetryingMetaStoreClient; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.client.HookEnabledMetaStoreClient; import org.apache.hadoop.hive.metastore.client.SynchronizedMetaStoreClient; import org.apache.hadoop.hive.metastore.client.ThriftHiveMetaStoreClient; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.utils.JavaUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.Map; import java.util.Objects; import java.util.function.Function; public class HiveMetaStoreClientBuilder { + private static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreClientBuilder.class); - private final Configuration conf; - private IMetaStoreClient client; + private final Configuration conf; + private IMetaStoreClient client; - public HiveMetaStoreClientBuilder(Configuration conf) { - this.conf = Objects.requireNonNull(conf); - } + public HiveMetaStoreClientBuilder(Configuration conf) { + this.conf = Objects.requireNonNull(conf); + } - public HiveMetaStoreClientBuilder newClient() throws MetaException { - this.client = new HiveMetaStoreClient(conf); - return this; - } + public HiveMetaStoreClientBuilder newClient(boolean allowEmbedded) throws MetaException { + this.client = createClient(conf, allowEmbedded); + return this; + } - public HiveMetaStoreClientBuilder newThriftClient(boolean allowEmbedded) throws MetaException { - this.client = ThriftHiveMetaStoreClient.newClient(conf, allowEmbedded); - return this; - } + public HiveMetaStoreClientBuilder client(IMetaStoreClient client) { + this.client = client; + return this; + } - public HiveMetaStoreClientBuilder client(IMetaStoreClient client) { - this.client = client; - return this; - } + public HiveMetaStoreClientBuilder enhanceWith(Function wrapperFunction) { + client = wrapperFunction.apply(client); + return this; + } - public HiveMetaStoreClientBuilder enhanceWith(Function wrapperFunction) { - client = wrapperFunction.apply(client); - return this; - } + public HiveMetaStoreClientBuilder withHooks(HiveMetaHookLoader hookLoader) { + this.client = HookEnabledMetaStoreClient.newClient(conf, hookLoader, client); + return this; + } - public HiveMetaStoreClientBuilder withHooks(HiveMetaHookLoader hookLoader) { - this.client = HookEnabledMetaStoreClient.newClient(conf, hookLoader, client); - return this; - } + public HiveMetaStoreClientBuilder withRetry(Map metaCallTimeMap) throws MetaException { + client = RetryingMetaStoreClient.getProxy(conf, metaCallTimeMap, client); + return this; + } - public HiveMetaStoreClientBuilder withRetry(Map metaCallTimeMap) throws MetaException { - client = RetryingMetaStoreClient.getProxy(conf, metaCallTimeMap, client); - return this; - } + public HiveMetaStoreClientBuilder threadSafe() { + this.client = 
SynchronizedMetaStoreClient.newClient(conf, client); + return this; + } - public HiveMetaStoreClientBuilder threadSafe() { - this.client = SynchronizedMetaStoreClient.newClient(conf, client); - return this; - } + public IMetaStoreClient build() { + return Objects.requireNonNull(client); + } - public IMetaStoreClient build() { - return Objects.requireNonNull(client); + public static IMetaStoreClient createClient(Configuration conf, boolean allowEmbedded) throws MetaException { + Class mscClass = MetastoreConf.getClass( + conf, MetastoreConf.ConfVars.METASTORE_CLIENT_CLASS, + ThriftHiveMetaStoreClient.class, IMetaStoreClient.class); + LOG.info("Using {} as a base MetaStoreClient", mscClass.getName()); + + IMetaStoreClient baseMetaStoreClient = null; + try { + baseMetaStoreClient = JavaUtils.newInstance(mscClass, + new Class[]{Configuration.class, boolean.class}, + new Object[]{conf, allowEmbedded}); + } catch (Throwable t) { + // Reflection by JavaUtils will throw RuntimeException, try to get real MetaException here. + Throwable rootCause = ExceptionUtils.getRootCause(t); + if (rootCause instanceof MetaException) { + throw (MetaException) rootCause; + } else { + throw new MetaException(rootCause.getMessage()); + } } + + return baseMetaStoreClient; + } } diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/client/HiveMetaStoreClientFactory.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/client/HiveMetaStoreClientFactory.java deleted file mode 100644 index e38f9836f172..000000000000 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/client/HiveMetaStoreClientFactory.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.metastore.client; - -import org.apache.commons.lang3.exception.ExceptionUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.metastore.IMetaStoreClient; -import org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.conf.MetastoreConf; -import org.apache.hadoop.hive.metastore.utils.JavaUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A factory class creating a MetaStoreClient specified in a given configuration. 
- */ -public class HiveMetaStoreClientFactory { - private static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreClientFactory.class); - - public static IMetaStoreClient newClient(Configuration conf, boolean allowEmbedded) throws MetaException { - String mscClassName = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_CLASS); - LOG.info("Using {} as a base MetaStoreClient", mscClassName); - Class mscClass = JavaUtils.getClass(mscClassName, IMetaStoreClient.class); - - IMetaStoreClient baseMetaStoreClient = null; - try { - baseMetaStoreClient = JavaUtils.newInstance(mscClass, - new Class[]{Configuration.class, boolean.class}, - new Object[]{conf, allowEmbedded}); - } catch (Throwable t) { - // Reflection by JavaUtils will throw RuntimeException, try to get real MetaException here. - Throwable rootCause = ExceptionUtils.getRootCause(t); - if (rootCause instanceof MetaException) { - throw (MetaException) rootCause; - } else { - throw new MetaException(rootCause.getMessage()); - } - } - - return baseMetaStoreClient; - } -} From 166e897907bb617232edc77b0890712dcccb07cc Mon Sep 17 00:00:00 2001 From: seonggon Date: Fri, 25 Jul 2025 13:57:34 +0900 Subject: [PATCH 12/12] Address review comments --- .../hadoop/hive/ql/metadata/TestHive.java | 5 +- .../hive/metastore/HiveMetaStoreClient.java | 108 +++++------------- .../builder/HiveMetaStoreClientBuilder.java | 4 +- .../hive/metastore/conf/MetastoreConf.java | 4 +- 4 files changed, 37 insertions(+), 84 deletions(-) diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index ebdacb6d3c50..55b628ac1112 100755 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -20,7 +20,6 @@ import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME; import static org.junit.Assert.assertThat; -import static org.hamcrest.CoreMatchers.instanceOf; import java.io.OutputStream; import java.util.ArrayList; @@ -1086,7 +1085,7 @@ private String getFileCheckSum(FileSystem fileSystem, Path p) throws Exception { public void testLoadingIMetaStoreClient() throws Throwable { String clientClassName = ThriftHiveMetaStoreClient.class.getName(); HiveConf conf = new HiveConf(); - MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_CLASS, clientClassName); + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_IMPL, clientClassName); // The current object was constructed in setUp() before we got here // so clean that up so we can inject our own dummy implementation of IMetaStoreClient Hive.closeCurrent(); @@ -1100,7 +1099,7 @@ public void testLoadingInvalidIMetaStoreClient() throws Throwable { // Intentionally invalid class String clientClassName = String.class.getName(); HiveConf conf = new HiveConf(); - MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_CLASS, clientClassName); + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_IMPL, clientClassName); // The current object was constructed in setUp() before we got here // so clean that up so we can inject our own dummy implementation of IMetaStoreClient Hive.closeCurrent(); diff --git a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index 2f2cb19d2066..122c0c3c491d 100644 --- 
a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -49,7 +49,7 @@ public class HiveMetaStoreClient extends MetaStoreClientWrapper implements IMeta public static final String MANUALLY_INITIATED_COMPACTION = "manual"; public static final String RENAME_PARTITION_MAKE_COPY = "renamePartitionMakeCopy"; - private final ThriftHiveMetaStoreClient thriftClient; + private ThriftHiveMetaStoreClient thriftClient = null; public HiveMetaStoreClient(Configuration conf) throws MetaException { this(conf, null, true); @@ -61,17 +61,15 @@ public HiveMetaStoreClient(Configuration conf, HiveMetaHookLoader hookLoader) th public HiveMetaStoreClient(Configuration conf, HiveMetaHookLoader hookLoader, Boolean allowEmbedded) throws MetaException { - this(conf, hookLoader, HiveMetaStoreClientBuilder.createClient(conf, allowEmbedded)); + this(conf, hookLoader, new HiveMetaStoreClientBuilder(conf).newClient(allowEmbedded).build()); } private HiveMetaStoreClient(Configuration conf, HiveMetaHookLoader hookLoader, IMetaStoreClient baseMetaStoreClient) { super(createUnderlyingClient(conf, hookLoader, baseMetaStoreClient), conf); - if (baseMetaStoreClient instanceof ThriftHiveMetaStoreClient) { - this.thriftClient = (ThriftHiveMetaStoreClient) baseMetaStoreClient; - } else { - this.thriftClient = null; + if (baseMetaStoreClient instanceof ThriftHiveMetaStoreClient metaStoreClient) { + this.thriftClient = metaStoreClient; } } @@ -86,36 +84,33 @@ private static IMetaStoreClient createUnderlyingClient(Configuration conf, HiveM // methods for test - public boolean createType(Type type) throws TException { + @FunctionalInterface + private interface ThriftCallable { + T call() throws TException; + } + + private T doCall(ThriftCallable callable) throws TException { if (thriftClient != null) { - return thriftClient.createType(type); + return callable.call(); } else { throw new UnsupportedOperationException(); } } + public boolean createType(Type type) throws TException { + return doCall(() -> thriftClient.createType(type)); + } + public boolean dropType(String type) throws TException { - if (thriftClient != null) { - return thriftClient.dropType(type); - } else { - throw new UnsupportedOperationException(); - } + return doCall(() -> thriftClient.dropType(type)); } public Type getType(String name) throws TException { - if (thriftClient != null) { - return thriftClient.getType(name); - } else { - throw new UnsupportedOperationException(); - } + return doCall(() -> thriftClient.getType(name)); } public Map getTypeAll(String name) throws TException { - if (thriftClient != null) { - return thriftClient.getTypeAll(name); - } else { - throw new UnsupportedOperationException(); - } + return doCall(() -> thriftClient.getTypeAll(name)); } public void createTable(Table tbl, EnvironmentContext envContext) throws TException { @@ -128,99 +123,58 @@ public void createTable(Table tbl, EnvironmentContext envContext) throws TExcept public Table getTable(String catName, String dbName, String tableName, boolean getColumnStats, String engine) throws TException { - if (thriftClient != null) { - return thriftClient.getTable(catName, dbName, tableName, getColumnStats, engine); - } else { - throw new UnsupportedOperationException(); - } + return doCall(() -> thriftClient.getTable(catName, dbName, tableName, getColumnStats, engine)); } public void dropTable(String catName, String 
dbname, String name, boolean deleteData, boolean ignoreUnknownTab, EnvironmentContext envContext) throws TException { - if (thriftClient != null) { + doCall(() -> { thriftClient.dropTable(catName, dbname, name, deleteData, ignoreUnknownTab, envContext); - } else { - throw new UnsupportedOperationException(); - } + return null; + }); } public Partition add_partition(Partition new_part, EnvironmentContext envContext) throws TException { - if (thriftClient != null) { - return thriftClient.add_partition(new_part, envContext); - } else { - throw new UnsupportedOperationException(); - } + return doCall(() -> thriftClient.add_partition(new_part, envContext)); } public Partition appendPartition(String dbName, String tableName, List partVals, EnvironmentContext ec) throws TException { - if (thriftClient != null) { - return thriftClient.appendPartition(dbName, tableName, partVals, ec); - } else { - throw new UnsupportedOperationException(); - } + return doCall(() -> thriftClient.appendPartition(dbName, tableName, partVals, ec)); } public Partition appendPartitionByName(String dbName, String tableName, String partName) throws TException { - if (thriftClient != null) { - return thriftClient.appendPartitionByName(dbName, tableName, partName); - } else { - throw new UnsupportedOperationException(); - } + return doCall(() -> thriftClient.appendPartitionByName(dbName, tableName, partName)); } public Partition appendPartitionByName(String dbName, String tableName, String partName, EnvironmentContext envContext) throws TException { - if (thriftClient != null) { - return thriftClient.appendPartitionByName(dbName, tableName, partName, envContext); - } else { - throw new UnsupportedOperationException(); - } + return doCall(() -> thriftClient.appendPartitionByName(dbName, tableName, partName, envContext)); } public boolean dropPartition(String db_name, String tbl_name, List part_vals, EnvironmentContext env_context) throws TException { - if (thriftClient != null) { - return thriftClient.dropPartition(db_name, tbl_name, part_vals, env_context); - } else { - throw new UnsupportedOperationException(); - } + return doCall(() -> thriftClient.dropPartition(db_name, tbl_name, part_vals, env_context)); } public boolean dropPartition(String dbName, String tableName, String partName, boolean dropData, EnvironmentContext ec) throws TException { - if (thriftClient != null) { - return thriftClient.dropPartition(dbName, tableName, partName, dropData, ec); - } else { - throw new UnsupportedOperationException(); - } + return doCall(() -> thriftClient.dropPartition(dbName, tableName, partName, dropData, ec)); } public boolean dropPartition(String dbName, String tableName, List partVals) throws TException { - if (thriftClient != null) { - return thriftClient.dropPartition(dbName, tableName, partVals); - } else { - throw new UnsupportedOperationException(); - } + return doCall(() -> thriftClient.dropPartition(dbName, tableName, partVals)); } public boolean dropPartitionByName(String dbName, String tableName, String partName, boolean deleteData) throws TException { - if (thriftClient != null) { - return thriftClient.dropPartitionByName(dbName, tableName, partName, deleteData); - } else { - throw new UnsupportedOperationException(); - } + return doCall(() -> thriftClient.dropPartitionByName(dbName, tableName, partName, deleteData)); } public boolean dropPartitionByName(String dbName, String tableName, String partName, boolean deleteData, EnvironmentContext envContext) throws TException { - if (thriftClient != null) { - return 
thriftClient.dropPartitionByName(dbName, tableName, partName, deleteData, envContext); - } else { - throw new UnsupportedOperationException(); - } + return doCall(() -> thriftClient.dropPartitionByName(dbName, tableName, partName, deleteData, envContext)); } @VisibleForTesting diff --git a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/builder/HiveMetaStoreClientBuilder.java b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/builder/HiveMetaStoreClientBuilder.java index b08f0506106d..903a3543ee29 100644 --- a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/builder/HiveMetaStoreClientBuilder.java +++ b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/builder/HiveMetaStoreClientBuilder.java @@ -80,9 +80,9 @@ public IMetaStoreClient build() { return Objects.requireNonNull(client); } - public static IMetaStoreClient createClient(Configuration conf, boolean allowEmbedded) throws MetaException { + private static IMetaStoreClient createClient(Configuration conf, boolean allowEmbedded) throws MetaException { Class mscClass = MetastoreConf.getClass( - conf, MetastoreConf.ConfVars.METASTORE_CLIENT_CLASS, + conf, MetastoreConf.ConfVars.METASTORE_CLIENT_IMPL, ThriftHiveMetaStoreClient.class, IMetaStoreClient.class); LOG.info("Using {} as a base MetaStoreClient", mscClass.getName()); diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java index 87a2417673c7..aaa24372f939 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java @@ -1728,8 +1728,8 @@ public enum ConfVars { " and password. Any other value is ignored right now but may be used later." + "If JWT- Supported only in HTTP transport mode. If set, HMS Client will pick the value of JWT from " + "environment variable HMS_JWT and set it in Authorization header in http request"), - METASTORE_CLIENT_CLASS("metastore.client.class", - "hive.metastore.client.class", + METASTORE_CLIENT_IMPL("metastore.client.impl", + "hive.metastore.client.impl", "org.apache.hadoop.hive.metastore.client.ThriftHiveMetaStoreClient", "The name of MetaStoreClient class that implements the IMetaStoreClient interface."), METASTORE_CLIENT_ADDITIONAL_HEADERS("metastore.client.http.additional.headers",