diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/property/metastore/HMSBaseProperties.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/property/metastore/HMSBaseProperties.java
index 9056aae6ee7201..54019cffe3143d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/property/metastore/HMSBaseProperties.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/property/metastore/HMSBaseProperties.java
@@ -159,6 +159,7 @@ private void initHadoopAuthenticator() {
         }
         if (this.hiveMetastoreAuthenticationType.equalsIgnoreCase("kerberos")) {
             hiveConf.set("hadoop.security.authentication", "kerberos");
+            hiveConf.set("hive.metastore.sasl.enabled", "true");
             KerberosAuthenticationConfig authenticationConfig = new KerberosAuthenticationConfig(
                     this.hiveMetastoreClientPrincipal, this.hiveMetastoreClientKeytab, hiveConf);
             this.hmsAuthenticator = HadoopAuthenticator.getHadoopAuthenticator(authenticationConfig);
@@ -175,6 +176,7 @@ private void initHadoopAuthenticator() {
             KerberosAuthenticationConfig authenticationConfig = new KerberosAuthenticationConfig(
                     this.hdfsKerberosPrincipal, this.hdfsKerberosKeytab, hiveConf);
             hiveConf.set("hadoop.security.authentication", "kerberos");
+            hiveConf.set("hive.metastore.sasl.enabled", "true");
             this.hmsAuthenticator = HadoopAuthenticator.getHadoopAuthenticator(authenticationConfig);
             return;
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/property/storage/HdfsProperties.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/property/storage/HdfsProperties.java
index 17efc8e78d5a20..1ef4c5f921d119 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/property/storage/HdfsProperties.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/property/storage/HdfsProperties.java
@@ -174,6 +174,7 @@ private void initBackendConfigProperties() {
         }
         props.put("hdfs.security.authentication", hdfsAuthenticationType);
         if ("kerberos".equalsIgnoreCase(hdfsAuthenticationType)) {
+            props.put("hadoop.security.authentication", "kerberos");
             props.put("hadoop.kerberos.principal", hdfsKerberosPrincipal);
             props.put("hadoop.kerberos.keytab", hdfsKerberosKeytab);
         }
diff --git a/fe/fe-core/src/test/java/org/apache/doris/datasource/property/storage/HdfsPropertiesTest.java b/fe/fe-core/src/test/java/org/apache/doris/datasource/property/storage/HdfsPropertiesTest.java
index 01e430fb84118f..b8ba275e9cfdf5 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/datasource/property/storage/HdfsPropertiesTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/datasource/property/storage/HdfsPropertiesTest.java
@@ -19,6 +19,8 @@

 import org.apache.doris.common.Config;
 import org.apache.doris.common.UserException;
+import org.apache.doris.common.security.authentication.HadoopKerberosAuthenticator;
+import org.apache.doris.common.security.authentication.HadoopSimpleAuthenticator;
 import org.apache.doris.datasource.property.storage.exception.StoragePropertiesException;

 import com.google.common.collect.Maps;
@@ -38,12 +40,13 @@ public void testBasicHdfsCreate() throws UserException {
         // Test 1: Check default authentication type (should be "simple")
         Map<String, String> simpleHdfsProperties = new HashMap<>();
         simpleHdfsProperties.put("uri", "hdfs://test/1.orc");
-        Assertions.assertEquals(HdfsProperties.class, StorageProperties.createPrimary(simpleHdfsProperties).getClass());
+        Assertions.assertEquals(HdfsProperties.class, StorageProperties.createPrimary(simpleHdfsProperties).getClass());
         Map<String, String> origProps = createBaseHdfsProperties();
         List<StorageProperties> storageProperties = StorageProperties.createAll(origProps);
         HdfsProperties hdfsProperties = (HdfsProperties) storageProperties.get(0);
         Configuration conf = hdfsProperties.getHadoopStorageConfig();
         Assertions.assertEquals("simple", conf.get("hadoop.security.authentication"));
+        Assertions.assertEquals(HadoopSimpleAuthenticator.class, hdfsProperties.getHadoopAuthenticator().getClass());

         // Test 2: Kerberos without necessary configurations (should throw exception)
         origProps.put("hdfs.authentication.type", "kerberos");
@@ -61,6 +64,7 @@ public void testBasicHdfsCreate() throws UserException {
         Assertions.assertEquals("kerberos", configuration.get("hdfs.security.authentication"));
         Assertions.assertEquals("hadoop", configuration.get("hadoop.kerberos.principal"));
         Assertions.assertEquals("keytab", configuration.get("hadoop.kerberos.keytab"));
+        Assertions.assertEquals(HadoopKerberosAuthenticator.class, properties.hadoopAuthenticator.getClass());
     }

     @Test
diff --git a/regression-test/suites/external_table_p2/paimon/test_paimon_hms_catalog.groovy b/regression-test/suites/external_table_p2/paimon/test_paimon_hms_catalog.groovy
index 790f1095d4e602..4ff84a27d0d297 100644
--- a/regression-test/suites/external_table_p2/paimon/test_paimon_hms_catalog.groovy
+++ b/regression-test/suites/external_table_p2/paimon/test_paimon_hms_catalog.groovy
@@ -126,12 +126,18 @@ suite("test_paimon_hms_catalog", "p2,external,paimon,new_catalog_property") {
         "hadoop.kerberos.principal"="hive/presto-master.docker.cluster@LABS.TERADATA.COM",
         "hadoop.kerberos.keytab" = "${keytab_root_dir}/hive-presto-master.keytab"
     """
+
+    String hdfs_new_kerberos_properties = """
+        "fs.defaultFS" = "hdfs://${extHiveHmsHost}:8520",
+        "hdfs.authentication.type" = "kerberos",
+        "hdfs.authentication.kerberos.principal"="hive/presto-master.docker.cluster@LABS.TERADATA.COM",
+        "hdfs.authentication.kerberos.keytab" = "${keytab_root_dir}/hive-presto-master.keytab"
+    """
     String hms_kerberos_new_prop = """
         "hive.metastore.uris" = "thrift://${extHiveHmsHost}:9583",
         "hive.metastore.client.principal"="hive/presto-master.docker.cluster@LABS.TERADATA.COM",
         "hive.metastore.client.keytab" = "${keytab_root_dir}/hive-presto-master.keytab",
         "hive.metastore.service.principal" = "hive/hadoop-master@LABS.TERADATA.COM",
-        "hive.metastore.sasl.enabled " = "true",
         "hadoop.security.auth_to_local" = "RULE:[2:\\\$1@\\\$0](.*@LABS.TERADATA.COM)s/@.*//
 RULE:[2:\\\$1@\\\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
 RULE:[2:\\\$1@\\\$0](.*@OTHERREALM.COM)s/@.*//
@@ -182,6 +188,7 @@ suite("test_paimon_hms_catalog", "p2,external,paimon,new_catalog_property") {
     """
     testQuery(paimon_hms_catalog_properties + hdfs_warehouse_properties + hdfs_storage_properties, "hdfs", "hdfs_db")
     testQuery(paimon_hms_type_prop + hdfs_warehouse_properties + hms_kerberos_new_prop + hdfs_kerberos_properties, "hdfs_kerberos", "hdfs_db")
+    testQuery(paimon_hms_type_prop + hdfs_warehouse_properties + hms_kerberos_new_prop + hdfs_new_kerberos_properties, "hdfs_new_kerberos", "hdfs_db")
     testQuery(paimon_hms_catalog_properties + oss_warehouse_properties + oss_storage_properties, "oss", "ali_db")
     testQuery(paimon_hms_catalog_properties + obs_warehouse_properties + obs_storage_properties, "obs", "hw_db")
     testQuery(paimon_hms_catalog_properties + cos_warehouse_properties + cos_storage_properties, "cos", "tx_db")
@@ -209,6 +216,7 @@ suite("test_paimon_hms_catalog", "p2,external,paimon,new_catalog_property") {
     """
     testQuery(paimon_hms_catalog_properties + paimon_fs_hdfs_support + hdfs_warehouse_properties + hdfs_storage_properties, "support_hdfs", "hdfs_db")
     testQuery(paimon_hms_type_prop + paimon_fs_hdfs_support + hdfs_warehouse_properties + hms_kerberos_new_prop + hdfs_kerberos_properties, "support_hdfs_kerberos", "hdfs_db")
+    testQuery(paimon_hms_type_prop + paimon_fs_hdfs_support + hdfs_warehouse_properties + hms_kerberos_new_prop + hdfs_new_kerberos_properties, "support_hdfs_new_kerberos", "hdfs_db")
     testQuery(paimon_hms_catalog_properties + paimon_fs_oss_support + oss_warehouse_properties + oss_storage_properties, "support_oss", "ali_db")
     testQuery(paimon_hms_catalog_properties + paimon_fs_obs_support + obs_warehouse_properties + obs_storage_properties, "support_obs", "hw_db")
     testQuery(paimon_hms_catalog_properties + paimon_fs_cos_support + cos_warehouse_properties + cos_storage_properties, "support_cos", "tx_db")
diff --git a/regression-test/suites/external_table_p2/refactor_catalog_param/hive_on_hms_and_dlf.groovy b/regression-test/suites/external_table_p2/refactor_catalog_param/hive_on_hms_and_dlf.groovy
index f0d711c7a8cffa..2d116d07b317e3 100644
--- a/regression-test/suites/external_table_p2/refactor_catalog_param/hive_on_hms_and_dlf.groovy
+++ b/regression-test/suites/external_table_p2/refactor_catalog_param/hive_on_hms_and_dlf.groovy
@@ -412,6 +412,13 @@ suite("hive_on_hms_and_dlf", "p2,external,new_catalog_property") {
         "hadoop.kerberos.principal"="hive/presto-master.docker.cluster@LABS.TERADATA.COM",
         "hadoop.kerberos.keytab" = "${keytab_root_dir}/hive-presto-master.keytab"
     """
+    String hdfs_new_kerberos_properties = """
+        "fs.defaultFS" = "hdfs://${externalEnvIp}:8520",
+        "hdfs.authentication.type" = "kerberos",
+        "hdfs.authentication.kerberos.principal"="hive/presto-master.docker.cluster@LABS.TERADATA.COM",
+        "hdfs.authentication.kerberos.keytab" = "${keytab_root_dir}/hive-presto-master.keytab"
+    """
+
     String dlf_access_key = context.config.otherConfigs.get("dlf_access_key")
     String dlf_secret_key = context.config.otherConfigs.get("dlf_secret_key")
     /**************** DLF *******************/
@@ -432,7 +439,6 @@
 RULE:[2:\\\$1@\\\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
 RULE:[2:\\\$1@\\\$0](.*@OTHERREALM.COM)s/@.*//
 DEFAULT",
-        "hive.metastore.sasl.enabled " = "true",
         "hive.metastore.kerberos.principal" = "hive/hadoop-master@LABS.TERADATA.COM",
     """

@@ -442,7 +448,6 @@
         "hive.metastore.client.principal"="hive/presto-master.docker.cluster@LABS.TERADATA.COM",
         "hive.metastore.client.keytab" = "${keytab_root_dir}/hive-presto-master.keytab",
         "hive.metastore.service.principal" = "hive/hadoop-master@LABS.TERADATA.COM",
-        "hive.metastore.sasl.enabled " = "true",
         "hive.metastore.authentication.type"="kerberos",
         "hadoop.security.auth_to_local" = "RULE:[2:\\\$1@\\\$0](.*@LABS.TERADATA.COM)s/@.*//
 RULE:[2:\\\$1@\\\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
@@ -538,6 +543,7 @@ suite("hive_on_hms_and_dlf", "p2,external,new_catalog_property") {

     db_location = "hdfs://${externalEnvIp}:8520/hive/hms/" + System.currentTimeMillis()
     testQueryAndInsert(hms_type_properties + hms_kerberos_new_prop + hdfs_kerberos_properties, "hive_hms_hdfs_kerberos_test", db_location)
+    testQueryAndInsert(hms_type_properties + hms_kerberos_new_prop + hdfs_new_kerberos_properties, "hive_hms_hdfs_new_kerberos_test", db_location)

     /**************** DLF *******************/

     String dlf_warehouse = "oss://selectdb-qa-datalake-test/hive-dlf-oss-warehouse"
diff --git a/regression-test/suites/external_table_p2/refactor_catalog_param/iceberg_on_hms_and_filesystem_and_dlf.groovy b/regression-test/suites/external_table_p2/refactor_catalog_param/iceberg_on_hms_and_filesystem_and_dlf.groovy
index 45b18889767cf4..21947e609a9f59 100644
--- a/regression-test/suites/external_table_p2/refactor_catalog_param/iceberg_on_hms_and_filesystem_and_dlf.groovy
+++ b/regression-test/suites/external_table_p2/refactor_catalog_param/iceberg_on_hms_and_filesystem_and_dlf.groovy
@@ -308,6 +308,15 @@ suite("iceberg_on_hms_and_filesystem_and_dlf", "p2,external,new_catalog_property
         "hadoop.kerberos.keytab" = "${keytab_root_dir}/hive-presto-master.keytab"
     """

+    String hdfs_new_kerberos_properties = """
+        "fs.defaultFS" = "hdfs://${externalEnvIp}:8520",
+        "io-impl" = "org.apache.doris.datasource.iceberg.fileio.DelegateFileIO",
+        "hdfs.authentication.type" = "kerberos",
+        "hdfs.authentication.kerberos.principal"="hive/presto-master.docker.cluster@LABS.TERADATA.COM",
+        "hdfs.authentication.kerberos.keytab" = "${keytab_root_dir}/hive-presto-master.keytab"
+    """
+
+
     String hms_prop = """
         'hive.metastore.uris' = 'thrift://${externalEnvIp}:9383',
     """
@@ -321,13 +330,11 @@ suite("iceberg_on_hms_and_filesystem_and_dlf", "p2,external,new_catalog_property
 RULE:[2:\\\$1@\\\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
 RULE:[2:\\\$1@\\\$0](.*@OTHERREALM.COM)s/@.*//
 DEFAULT",
-        "hive.metastore.sasl.enabled " = "true",
         "hive.metastore.kerberos.principal" = "hive/hadoop-master@LABS.TERADATA.COM",
     """

     String hms_kerberos_old_prop_not_include_kerberos_prop = """
         "hive.metastore.uris" = "thrift://${externalEnvIp}:9583",
-        "hive.metastore.sasl.enabled " = "true",
         "hive.metastore.kerberos.principal" = "hive/hadoop-master@LABS.TERADATA.COM",
     """

@@ -336,7 +343,6 @@ suite("iceberg_on_hms_and_filesystem_and_dlf", "p2,external,new_catalog_property
         "hive.metastore.client.principal"="hive/presto-master.docker.cluster@LABS.TERADATA.COM",
         "hive.metastore.client.keytab" = "${keytab_root_dir}/hive-presto-master.keytab",
         "hive.metastore.service.principal" = "hive/hadoop-master@LABS.TERADATA.COM",
-        "hive.metastore.sasl.enabled " = "true",
         "hive.metastore.authentication.type"="kerberos",
         "hadoop.security.auth_to_local" = "RULE:[2:\\\$1@\\\$0](.*@LABS.TERADATA.COM)s/@.*//
 RULE:[2:\\\$1@\\\$0](.*@OTHERLABS.TERADATA.COM)s/@.*//
@@ -437,6 +443,8 @@ suite("iceberg_on_hms_and_filesystem_and_dlf", "p2,external,new_catalog_property
     testQueryAndInsert(iceberg_hms_type_prop + hms_kerberos_old_prop_not_include_kerberos_prop + warehouse + hdfs_kerberos_properties, "iceberg_hms_on_hdfs_kerberos_old")
     //new kerberos
     testQueryAndInsert(iceberg_hms_type_prop + hms_kerberos_new_prop + warehouse + hdfs_kerberos_properties, "iceberg_hms_on_hdfs_kerberos_hdfs")
+    //new hdfs kerberos
+    testQueryAndInsert(iceberg_hms_type_prop + hms_kerberos_new_prop + warehouse + hdfs_new_kerberos_properties, "iceberg_hms_on_hdfs_new_kerberos")


     /*--------HMS END-----------*/
@@ -500,6 +508,7 @@ suite("iceberg_on_hms_and_filesystem_and_dlf", "p2,external,new_catalog_property
         'warehouse' = 'hdfs://${externalEnvIp}:8520/iceberg-fs-hdfs-warehouse',
     """
     testQueryAndInsert(iceberg_file_system_catalog_properties + warehouse + hdfs_kerberos_properties, "iceberg_fs_on_hdfs_kerberos")
+    testQueryAndInsert(iceberg_file_system_catalog_properties + warehouse + hdfs_new_kerberos_properties, "iceberg_fs_on_hdfs_new_kerberos")

     /*
     *//** S3 **/
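
For reference, below is a minimal FE-side sketch (not part of the patch) of how the new hdfs.authentication.* storage properties are expected to resolve, mirroring the assertions added to HdfsPropertiesTest above. The test class name and the concrete fs.defaultFS/principal/keytab values are illustrative placeholders in the style of that test, not a working Kerberos setup:

    // Sketch only: follows the pattern of HdfsPropertiesTest in this patch;
    // the values below are dummy placeholders, not a working Kerberos setup.
    import org.apache.doris.common.UserException;
    import org.apache.doris.common.security.authentication.HadoopKerberosAuthenticator;
    import org.apache.doris.datasource.property.storage.HdfsProperties;
    import org.apache.doris.datasource.property.storage.StorageProperties;

    import org.junit.jupiter.api.Assertions;
    import org.junit.jupiter.api.Test;

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class HdfsNewKerberosPropertiesSketch {

        @Test
        public void testNewStyleKerberosProperties() throws UserException {
            // New-style HDFS storage properties, as used by the regression suites above.
            Map<String, String> props = new HashMap<>();
            props.put("fs.defaultFS", "hdfs://test");
            props.put("hdfs.authentication.type", "kerberos");
            props.put("hdfs.authentication.kerberos.principal", "hadoop");
            props.put("hdfs.authentication.kerberos.keytab", "keytab");

            // As in HdfsPropertiesTest, the HDFS entry is taken from the createAll result.
            List<StorageProperties> storageProperties = StorageProperties.createAll(props);
            HdfsProperties hdfsProperties = (HdfsProperties) storageProperties.get(0);

            // A kerberos authentication type should select the Kerberos authenticator,
            // matching the assertion added to HdfsPropertiesTest in this patch.
            Assertions.assertEquals(HadoopKerberosAuthenticator.class,
                    hdfsProperties.getHadoopAuthenticator().getClass());
        }
    }

On the backend side, the HdfsProperties change above additionally puts hadoop.security.authentication=kerberos into the map built in initBackendConfigProperties(), alongside the existing hadoop.kerberos.principal/keytab entries, so the kerberos mode is carried by the standard Hadoop key and not only by hdfs.security.authentication.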