Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.doris.common.security.authentication;

import java.io.IOException;
import java.security.PrivilegedExceptionAction;

/**
 * Abstraction for executing an action under some authentication context
 * (e.g. a Kerberos-authenticated Hadoop UGI, or no authentication at all).
 */
public interface Authenticator {

    /**
     * Runs {@code action} under this authenticator's security context.
     *
     * @param action the privileged action to execute
     * @param <T>    the action's result type
     * @return the value produced by {@code action}
     * @throws IOException if the action fails; implementations wrap checked
     *                     failures from the action in an {@link IOException}
     */
    <T> T doAs(PrivilegedExceptionAction<T> action) throws IOException;

    /**
     * Convenience variant of {@link #doAs} for actions with no result.
     * (The original declared an unused type parameter here; it carried no
     * information and has been removed.)
     *
     * @param action the action to execute
     * @throws IOException if the action fails
     */
    default void doAsNoReturn(Runnable action) throws IOException {
        doAs(() -> {
            action.run();
            return null;
        });
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
import java.io.IOException;
import java.security.PrivilegedExceptionAction;

public interface HadoopAuthenticator {
public interface HadoopAuthenticator extends Authenticator {

UserGroupInformation getUGI() throws IOException;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
import org.apache.doris.common.Version;
import org.apache.doris.common.io.Text;
import org.apache.doris.common.io.Writable;
import org.apache.doris.common.security.authentication.Authenticator;
import org.apache.doris.common.util.Util;
import org.apache.doris.datasource.es.EsExternalDatabase;
import org.apache.doris.datasource.hive.HMSExternalCatalog;
Expand Down Expand Up @@ -78,6 +79,7 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
Expand Down Expand Up @@ -157,6 +159,23 @@ public Configuration getConfiguration() {
return conf;
}

/**
 * Returns the authenticator for this catalog.
 * The base implementation is a pass-through ("dummy") authenticator that runs
 * the action directly, without switching any security context; subclasses
 * (e.g. HMS-backed catalogs) override this to provide Kerberos authentication.
 */
public synchronized Authenticator getAuthenticator() {
    return new Authenticator() {
        @Override
        public <T> T doAs(PrivilegedExceptionAction<T> action) throws IOException {
            try {
                return action.run();
            } catch (IOException e) {
                // Already the declared type — rethrow as-is instead of
                // double-wrapping it in another IOException.
                throw e;
            } catch (InterruptedException e) {
                // Preserve the thread's interrupt status before translating.
                Thread.currentThread().interrupt();
                throw new IOException(e);
            } catch (Exception e) {
                throw new IOException(e);
            }
        }
    };
}

/**
* set some default properties when creating catalog
* @return list of database names in this catalog
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,6 @@

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import lombok.Getter;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.logging.log4j.LogManager;
Expand Down Expand Up @@ -70,7 +69,6 @@ public class HMSExternalCatalog extends ExternalCatalog {

private static final int FILE_SYSTEM_EXECUTOR_THREAD_NUM = 16;
private ThreadPoolExecutor fileSystemExecutor;
@Getter
private HadoopAuthenticator authenticator;

@VisibleForTesting
Expand Down Expand Up @@ -263,7 +261,12 @@ public String getHiveMetastoreUris() {
return catalogProperty.getOrDefault(HMSProperties.HIVE_METASTORE_URIS, "");
}

public String getHiveVersion() {
return catalogProperty.getOrDefault(HMSProperties.HIVE_VERSION, "");
@Override
public synchronized HadoopAuthenticator getAuthenticator() {
    // Lazily build the Kerberos-backed authenticator on first use;
    // `synchronized` makes the check-then-create safe across threads.
    if (authenticator != null) {
        return authenticator;
    }
    AuthenticationConfig kerberosConfig = AuthenticationConfig.getKerberosConfig(getConfiguration());
    authenticator = HadoopAuthenticator.getHadoopAuthenticator(kerberosConfig);
    return authenticator;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@
import org.apache.doris.analysis.SlotRef;
import org.apache.doris.analysis.StringLiteral;
import org.apache.doris.catalog.ArrayType;
import org.apache.doris.catalog.Env;
import org.apache.doris.catalog.MapType;
import org.apache.doris.catalog.PrimitiveType;
import org.apache.doris.catalog.ScalarType;
Expand All @@ -41,7 +40,6 @@
import org.apache.doris.common.DdlException;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.security.authentication.HadoopUGI;
import org.apache.doris.datasource.ExternalCatalog;
import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import org.apache.doris.thrift.TExprOpcode;

Expand Down Expand Up @@ -813,11 +811,6 @@ public static Schema getHudiTableSchema(HMSExternalTable table) {
return hudiSchema;
}

// Looks up the catalog by id and delegates to the Configuration-based overload.
public static <T> T ugiDoAs(long catalogId, PrivilegedExceptionAction<T> action) {
    ExternalCatalog catalog = (ExternalCatalog) Env.getCurrentEnv().getCatalogMgr().getCatalog(catalogId);
    return ugiDoAs(catalog.getConfiguration(), action);
}

public static <T> T ugiDoAs(Configuration conf, PrivilegedExceptionAction<T> action) {
// if hive config is not ready, then use hadoop kerberos to login
AuthenticationConfig krbConfig = AuthenticationConfig.getKerberosConfig(conf,
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,11 @@

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3a.Constants;
import org.apache.iceberg.Table;
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.catalog.TableIdentifier;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

Expand Down Expand Up @@ -83,4 +86,25 @@ protected void initS3Param(Configuration conf) {
Map<String, String> properties = catalogProperty.getHadoopProperties();
conf.set(Constants.AWS_CREDENTIALS_PROVIDER, PropertyConverter.getAWSCredentialsProviders(properties));
}

/**
 * Loads a table through the underlying Iceberg catalog and re-initializes
 * its FileIO with this catalog's properties merged in.
 */
public Table loadTable(TableIdentifier of) {
    Table table = getCatalog().loadTable(of);
    initIcebergTableFileIO(table, getProperties());
    return table;
}

/**
 * Re-initializes the table's FileIO with the given catalog-level properties
 * merged with the table's own io-related properties.
 *
 * FileIO.initialize replaces the properties wholesale, so both sources must
 * be combined before the call. The table's {@code io.*} keys take precedence
 * over the catalog properties, matching the original merge order.
 *
 * Fix: merge into a local copy instead of calling {@code props.putAll(...)},
 * which mutated the caller's map and could pollute shared catalog properties.
 *
 * @param table the Iceberg table whose FileIO is (re)initialized
 * @param props catalog-level properties; not modified by this method
 */
public static void initIcebergTableFileIO(Table table, Map<String, String> props) {
    // Collect the table's io-related properties (keys prefixed with "io.").
    Map<String, String> ioConf = new HashMap<>();
    table.properties().forEach((key, value) -> {
        if (key.startsWith("io.")) {
            ioConf.put(key, value);
        }
    });

    // Merge into a copy so the caller's map is left untouched; io.* entries
    // from the table override catalog-level entries, as before.
    Map<String, String> merged = new HashMap<>(props);
    merged.putAll(ioConf);
    table.io().initialize(merged);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,11 @@
package org.apache.doris.datasource.iceberg;

import org.apache.doris.datasource.CatalogProperty;
import org.apache.doris.datasource.iceberg.hive.IcebergHiveCatalog;
import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.datasource.property.constants.HMSProperties;

import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.hive.HiveCatalog;

import java.util.Map;

Expand All @@ -38,7 +38,7 @@ public IcebergHMSExternalCatalog(long catalogId, String name, String resource, M
@Override
protected void initCatalog() {
icebergCatalogType = ICEBERG_HMS;
HiveCatalog hiveCatalog = new org.apache.iceberg.hive.HiveCatalog();
IcebergHiveCatalog hiveCatalog = new IcebergHiveCatalog();
hiveCatalog.setConf(getConfiguration());
// initialize hive catalog
Map<String, String> catalogProperties = catalogProperty.getProperties();
Expand Down
Loading