Authenticator.java (new file)
@@ -0,0 +1,33 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.doris.common.security.authentication;

import java.io.IOException;
import java.security.PrivilegedExceptionAction;

public interface Authenticator {

    <T> T doAs(PrivilegedExceptionAction<T> action) throws IOException;

    default <T> void doAsNoReturn(Runnable action) throws IOException {
        doAs(() -> {
            action.run();
            return null;
        });
    }
}
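A quick usage sketch of the contract (the caller below is hypothetical; only the interface itself comes from the diff): `doAs` runs an action under the authenticated identity and returns its result, while `doAsNoReturn` is the void convenience wrapper.

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

// Hypothetical caller demonstrating both entry points of Authenticator.
public class AuthenticatorUsage {
    // Returns the action's result; checked failures surface as IOException.
    static String readManifest(Authenticator auth) throws IOException {
        return auth.doAs(() -> new String(Files.readAllBytes(Paths.get("/tmp/manifest"))));
    }

    // Fire-and-forget variant for actions without a result.
    static void logIdentity(Authenticator auth) throws IOException {
        auth.doAsNoReturn(() -> System.out.println("running as the authenticated user"));
    }
}
```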
HadoopAuthenticator.java
@@ -22,7 +22,7 @@
import java.io.IOException;
import java.security.PrivilegedExceptionAction;

-public interface HadoopAuthenticator {
public interface HadoopAuthenticator extends Authenticator {

    UserGroupInformation getUGI() throws IOException;

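The hunk is truncated here. For orientation, one plausible way for HadoopAuthenticator to satisfy the inherited doAs contract is to delegate to UserGroupInformation.doAs; this is a sketch, not necessarily what the PR does:

```java
import java.io.IOException;
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.security.UserGroupInformation;

// Sketch only: bridges getUGI() to Authenticator#doAs via Hadoop's UGI.
public interface HadoopAuthenticatorSketch extends Authenticator {
    UserGroupInformation getUGI() throws IOException;

    @Override
    default <T> T doAs(PrivilegedExceptionAction<T> action) throws IOException {
        try {
            // UGI.doAs throws IOException | InterruptedException; fold the latter in.
            return getUGI().doAs(action);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new IOException(e);
        }
    }
}
```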
HadoopUGI.java
@@ -40,7 +40,7 @@ private static UserGroupInformation loginWithUGI(AuthenticationConfig config) {
        }
        if (config instanceof KerberosAuthenticationConfig) {
            try {
-                // TODO: remove after iceberg and hudi kerberos test case pass
                // TODO: remove after hudi kerberos test case pass
                try {
                    // login hadoop with keytab and try checking TGT
                    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
SimpleAuthenticationConfig.java
@@ -17,10 +17,12 @@

package org.apache.doris.common.security.authentication;

import com.google.gson.annotations.SerializedName;
import lombok.Data;

@Data
public class SimpleAuthenticationConfig extends AuthenticationConfig {
@SerializedName(value = "username")
private String username;

@Override
ExternalCatalog.java
@@ -37,6 +37,7 @@
import org.apache.doris.common.Version;
import org.apache.doris.common.io.Text;
import org.apache.doris.common.io.Writable;
import org.apache.doris.common.security.authentication.Authenticator;
import org.apache.doris.common.util.Util;
import org.apache.doris.datasource.es.EsExternalDatabase;
import org.apache.doris.datasource.hive.HMSExternalCatalog;
@@ -78,6 +79,7 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
@@ -158,6 +160,23 @@ public Configuration getConfiguration() {
        return conf;
    }

    /**
     * Get the authenticator for this catalog.
     * Returns a pass-through authenticator by default; catalogs that need real
     * authentication (e.g. Kerberos) override this method.
     */
    public synchronized Authenticator getAuthenticator() {
        return new Authenticator() {
            @Override
            public <T> T doAs(PrivilegedExceptionAction<T> action) throws IOException {
                try {
                    return action.run();
                } catch (Exception e) {
                    throw new IOException(e);
                }
            }
        };
    }

    /**
     * set some default properties when creating catalog
     * @return list of database names in this catalog
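Design note: the anonymous pass-through above acts as a null object, so call sites can always go through getAuthenticator().doAs(...) without null checks; only catalogs that authenticate for real override it.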
HMSExternalCatalog.java
@@ -42,7 +42,6 @@

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
-import lombok.Getter;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.logging.log4j.LogManager;
@@ -70,7 +69,7 @@ public class HMSExternalCatalog extends ExternalCatalog {

    private static final int FILE_SYSTEM_EXECUTOR_THREAD_NUM = 16;
    private ThreadPoolExecutor fileSystemExecutor;
-    @Getter

    private HadoopAuthenticator authenticator;

    @VisibleForTesting
@@ -86,8 +85,15 @@ public HMSExternalCatalog(long catalogId, String name, String resource, Map<Stri
        super(catalogId, name, InitCatalogLog.Type.HMS, comment);
        props = PropertyConverter.convertToMetaProperties(props);
        catalogProperty = new CatalogProperty(resource, props);
-        AuthenticationConfig config = AuthenticationConfig.getKerberosConfig(getConfiguration());
-        authenticator = HadoopAuthenticator.getHadoopAuthenticator(config);
    }

    @Override
    public synchronized HadoopAuthenticator getAuthenticator() {
        if (authenticator == null) {
            AuthenticationConfig config = AuthenticationConfig.getKerberosConfig(getConfiguration());
            authenticator = HadoopAuthenticator.getHadoopAuthenticator(config);
        }
        return authenticator;
    }

    @Override
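Design note (my reading of the hunk): creating the Kerberos authenticator moved out of the constructor into a synchronized lazy getter, so the cost is paid on first use rather than at catalog construction or replay time; the same pattern recurs in the two Iceberg catalogs below.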
HiveMetaStoreClientHelper.java
@@ -31,7 +31,6 @@
import org.apache.doris.analysis.SlotRef;
import org.apache.doris.analysis.StringLiteral;
import org.apache.doris.catalog.ArrayType;
-import org.apache.doris.catalog.Env;
import org.apache.doris.catalog.MapType;
import org.apache.doris.catalog.PrimitiveType;
import org.apache.doris.catalog.ScalarType;
@@ -41,7 +40,6 @@
import org.apache.doris.common.DdlException;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.security.authentication.HadoopUGI;
-import org.apache.doris.datasource.ExternalCatalog;
import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import org.apache.doris.thrift.TExprOpcode;

@@ -813,11 +811,6 @@ public static Schema getHudiTableSchema(HMSExternalTable table) {
        return hudiSchema;
    }

-    public static <T> T ugiDoAs(long catalogId, PrivilegedExceptionAction<T> action) {
-        return ugiDoAs(((ExternalCatalog) Env.getCurrentEnv().getCatalogMgr().getCatalog(catalogId)).getConfiguration(),
-                action);
-    }

    public static <T> T ugiDoAs(Configuration conf, PrivilegedExceptionAction<T> action) {
        // if hive config is not ready, then use hadoop kerberos to login
        AuthenticationConfig krbConfig = AuthenticationConfig.getKerberosConfig(conf,
IcebergExternalCatalog.java
@@ -26,8 +26,12 @@

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3a.Constants;
import org.apache.iceberg.Table;
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.catalog.TableIdentifier;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@@ -59,7 +63,7 @@ protected void initLocalObjectsImpl() {

    public Catalog getCatalog() {
        makeSureInitialized();
-        return ((IcebergMetadataOps) metadataOps).getCatalog();
        return catalog;
    }

    public String getIcebergCatalogType() {
@@ -83,4 +87,25 @@ protected void initS3Param(Configuration conf) {
        Map<String, String> properties = catalogProperty.getHadoopProperties();
        conf.set(Constants.AWS_CREDENTIALS_PROVIDER, PropertyConverter.getAWSCredentialsProviders(properties));
    }

    public Table loadTable(TableIdentifier of) {
        Table tbl = getCatalog().loadTable(of);
        Map<String, String> extProps = getProperties();
        initIcebergTableFileIO(tbl, extProps);
        return tbl;
    }

    public static void initIcebergTableFileIO(Table table, Map<String, String> props) {
        Map<String, String> ioConf = new HashMap<>();
        table.properties().forEach((key, value) -> {
            if (key.startsWith("io.")) {
                ioConf.put(key, value);
            }
        });

        // The FileIO `initialize` method replaces the properties wholesale, so merge
        // the table's io-related properties into Doris's catalog-level properties first.
        props.putAll(ioConf);
        table.io().initialize(props);
    }
}
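A tiny sketch of the merge order (the property key is illustrative): because the table's io.* entries are copied in last, they win over catalog-level values on conflict.

```java
import java.util.HashMap;
import java.util.Map;

public class FileIoMergeDemo {
    public static void main(String[] args) {
        Map<String, String> catalogProps = new HashMap<>();
        catalogProps.put("io.manifest.cache-enabled", "false"); // catalog-level value (illustrative key)

        Map<String, String> tableIoProps = new HashMap<>();
        tableIoProps.put("io.manifest.cache-enabled", "true");  // table-level io.* property

        catalogProps.putAll(tableIoProps); // same order as initIcebergTableFileIO: table wins
        System.out.println(catalogProps);  // prints {io.manifest.cache-enabled=true}
    }
}
```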
IcebergHMSExternalCatalog.java
@@ -17,24 +17,41 @@

package org.apache.doris.datasource.iceberg;

import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.security.authentication.HadoopAuthenticator;
import org.apache.doris.datasource.CatalogProperty;
import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.datasource.property.constants.HMSProperties;
import org.apache.doris.nereids.exceptions.AnalysisException;

import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.Table;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.hive.HiveCatalog;

import java.io.IOException;
import java.util.Map;

public class IcebergHMSExternalCatalog extends IcebergExternalCatalog {

    private HadoopAuthenticator authenticator;

    public IcebergHMSExternalCatalog(long catalogId, String name, String resource, Map<String, String> props,
                                     String comment) {
        super(catalogId, name, comment);
        props = PropertyConverter.convertToMetaProperties(props);
        catalogProperty = new CatalogProperty(resource, props);
    }

    @Override
    public synchronized HadoopAuthenticator getAuthenticator() {
        if (authenticator == null) {
            AuthenticationConfig config = AuthenticationConfig.getKerberosConfig(getConfiguration());
            authenticator = HadoopAuthenticator.getHadoopAuthenticator(config);
        }
        return authenticator;
    }

    @Override
    protected void initCatalog() {
        icebergCatalogType = ICEBERG_HMS;
@@ -44,8 +61,32 @@ protected void initCatalog() {
        Map<String, String> catalogProperties = catalogProperty.getProperties();
        String metastoreUris = catalogProperty.getOrDefault(HMSProperties.HIVE_METASTORE_URIS, "");
        catalogProperties.put(CatalogProperties.URI, metastoreUris);
-        hiveCatalog.initialize(icebergCatalogType, catalogProperties);
        try {
            getAuthenticator().doAsNoReturn(() -> hiveCatalog.initialize(icebergCatalogType, catalogProperties));
        } catch (IOException e) {
            throw new AnalysisException(e.getMessage(), e);
        }
        catalog = hiveCatalog;
    }

    @Override
    public Table loadTable(TableIdentifier of) {
        // TODO: construct the table from HiveOperations directly, e.g.:
        // HiveOperations operations = new HiveOperations(
        //         FileSystemFactory.get(),
        //         catalog.getMetastore(),
        //         database,
        //         table,
        //         location);
        // return new BaseTable(operations, of.toString());
        Table tbl;
        try {
            tbl = getAuthenticator().doAs(() -> getCatalog().loadTable(of));
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        Map<String, String> extProps = getProperties();
        initIcebergTableFileIO(tbl, extProps);
        return tbl;
    }
}
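Worth noting: this override and the IcebergHadoopExternalCatalog one below are identical apart from the commented-out TODO block, and both funnel into the shared initIcebergTableFileIO; a follow-up could hoist the doAs wrapping into the base class's loadTable.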

IcebergHadoopExternalCatalog.java
@@ -18,19 +18,27 @@
package org.apache.doris.datasource.iceberg;

import org.apache.doris.catalog.HdfsResource;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.security.authentication.HadoopAuthenticator;
import org.apache.doris.datasource.CatalogProperty;
import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.nereids.exceptions.AnalysisException;

import com.google.common.base.Preconditions;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.Table;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.hadoop.HadoopCatalog;

import java.io.IOException;
import java.util.Map;

public class IcebergHadoopExternalCatalog extends IcebergExternalCatalog {

    private HadoopAuthenticator authenticator;

    public IcebergHadoopExternalCatalog(long catalogId, String name, String resource, Map<String, String> props,
                                        String comment) {
        super(catalogId, name, comment);
@@ -49,6 +57,15 @@ public IcebergHadoopExternalCatalog(long catalogId, String name, String resource
        }
    }

    @Override
    public synchronized HadoopAuthenticator getAuthenticator() {
        if (authenticator == null) {
            AuthenticationConfig config = AuthenticationConfig.getKerberosConfig(getConfiguration());
            authenticator = HadoopAuthenticator.getHadoopAuthenticator(config);
        }
        return authenticator;
    }

    @Override
    protected void initCatalog() {
        icebergCatalogType = ICEBERG_HADOOP;
@@ -60,7 +77,32 @@ protected void initCatalog() {
        String warehouse = catalogProperty.getHadoopProperties().get(CatalogProperties.WAREHOUSE_LOCATION);
        hadoopCatalog.setConf(conf);
        catalogProperties.put(CatalogProperties.WAREHOUSE_LOCATION, warehouse);
-        hadoopCatalog.initialize(icebergCatalogType, catalogProperties);
        try {
            getAuthenticator().doAsNoReturn(() -> hadoopCatalog.initialize(icebergCatalogType, catalogProperties));
        } catch (IOException e) {
            throw new AnalysisException(e.getMessage(), e);
        }
        catalog = hadoopCatalog;
    }

    @Override
    public Table loadTable(TableIdentifier of) {
        // TODO: construct the table from FileOperations directly, e.g.:
        // FileOperations operations = new FileOperations(
        //         FileSystemFactory.get(),
        //         catalog.getMetastore(),
        //         database,
        //         table,
        //         location);
        // return new BaseTable(operations, of.toString());
        Table tbl;
        try {
            tbl = getAuthenticator().doAs(() -> getCatalog().loadTable(of));
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        Map<String, String> extProps = getProperties();
        initIcebergTableFileIO(tbl, extProps);
        return tbl;
    }
}
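End to end, a hedged sketch of what the new path enables (catalog and identifier names are illustrative): loadTable now runs the Iceberg catalog call under the catalog's Kerberos identity and then rewires the table's FileIO.

```java
import org.apache.iceberg.Table;
import org.apache.iceberg.catalog.TableIdentifier;

public class LoadTableDemo {
    // `catalog` is assumed to be an already-initialized IcebergHadoopExternalCatalog.
    static void show(IcebergHadoopExternalCatalog catalog) {
        // loadTable wraps the catalog call in doAs and merges io.* properties.
        Table table = catalog.loadTable(TableIdentifier.of("db1", "tbl1"));
        System.out.println(table.schema());
    }
}
```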