Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
import org.apache.doris.common.proc.BaseProcResult;
import org.apache.doris.common.util.PrintableMap;
import org.apache.doris.datasource.credentials.CloudCredentialWithEndpoint;
import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.datasource.property.constants.S3Properties;
import org.apache.doris.fs.remote.S3FileSystem;

Expand Down Expand Up @@ -121,15 +120,6 @@ protected void setProperties(Map<String, String> properties) throws DdlException
private static void pingS3(CloudCredentialWithEndpoint credential, String bucketName, String rootPath,
Map<String, String> properties) throws DdlException {
String bucket = "s3://" + bucketName + "/";
Map<String, String> propertiesPing = new HashMap<>();
propertiesPing.put(S3Properties.Env.ACCESS_KEY, credential.getAccessKey());
propertiesPing.put(S3Properties.Env.SECRET_KEY, credential.getSecretKey());
propertiesPing.put(S3Properties.Env.TOKEN, credential.getSessionToken());
propertiesPing.put(S3Properties.Env.ENDPOINT, credential.getEndpoint());
propertiesPing.put(S3Properties.Env.REGION, credential.getRegion());
propertiesPing.put(PropertyConverter.USE_PATH_STYLE,
properties.getOrDefault(PropertyConverter.USE_PATH_STYLE, "false"));
properties.putAll(propertiesPing);
S3FileSystem fileSystem = new S3FileSystem(properties);
String testFile = bucket + rootPath + "/test-object-valid.txt";
String content = "doris will be better";
Expand All @@ -142,14 +132,14 @@ private static void pingS3(CloudCredentialWithEndpoint credential, String bucket
if (status != Status.OK) {
throw new DdlException(
"ping s3 failed(upload), status: " + status + ", properties: " + new PrintableMap<>(
propertiesPing, "=", true, false, true, false));
properties, "=", true, false, true, false));
}
} finally {
if (status.ok()) {
Status delete = fileSystem.delete(testFile);
if (delete != Status.OK) {
LOG.warn("delete test file failed, status: {}, properties: {}", delete, new PrintableMap<>(
propertiesPing, "=", true, false, true, false));
properties, "=", true, false, true, false));
}
}
}
Expand Down Expand Up @@ -250,3 +240,4 @@ protected void getProcNodeData(BaseProcResult result) {
readUnlock();
}
}

Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,10 @@ protected FileSystem nativeFileSystem(String remotePath) throws UserException {
if (dfsFileSystem == null) {
Configuration conf = new Configuration();
System.setProperty("com.amazonaws.services.s3.enableV4", "true");
PropertyConverter.convertToHadoopFSProperties(properties).forEach(conf::set);
// the entry value in properties may be null, and Configuration.set rejects null names/values, so filter them out
PropertyConverter.convertToHadoopFSProperties(properties).entrySet().stream()
.filter(entry -> entry.getKey() != null && entry.getValue() != null)
.forEach(entry -> conf.set(entry.getKey(), entry.getValue()));
try {
dfsFileSystem = FileSystem.get(new Path(remotePath).toUri(), conf);
} catch (Exception e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,7 @@ protected TableValuedFunctionIf toCatalogFunction() {
Map<String, String> arguments = getTVFProperties().getMap();
return new HdfsTableValuedFunction(arguments);
} catch (Throwable t) {
throw new AnalysisException("Can not build HdfsTableValuedFunction by "
+ this + ": " + t.getMessage(), t);
throw new AnalysisException("Can not build hdfs(): " + t.getMessage(), t);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,7 @@ protected TableValuedFunctionIf toCatalogFunction() {
Map<String, String> arguments = getTVFProperties().getMap();
return new LocalTableValuedFunction(arguments);
} catch (Throwable t) {
throw new AnalysisException("Can not build LocalTableValuedFunction by "
+ this + ": " + t.getMessage(), t);
throw new AnalysisException("Can not build local(): " + t.getMessage(), t);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,7 @@ protected TableValuedFunctionIf toCatalogFunction() {
Map<String, String> arguments = getTVFProperties().getMap();
return new S3TableValuedFunction(arguments);
} catch (Throwable t) {
throw new AnalysisException("Can not build S3TableValuedFunction by "
+ this + ": " + t.getMessage(), t);
throw new AnalysisException("Can not build s3(): " + t.getMessage(), t);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,8 +73,10 @@ public S3TableValuedFunction(Map<String, String> properties) throws AnalysisExce

S3URI s3uri = getS3Uri(uriStr, Boolean.parseBoolean(usePathStyle.toLowerCase()),
Boolean.parseBoolean(forceParsingByStandardUri.toLowerCase()));
String endpoint = getOrDefaultAndRemove(otherProps, S3Properties.ENDPOINT, s3uri.getEndpoint().orElseThrow(() ->
new AnalysisException(String.format("Properties '%s' is required.", S3Properties.ENDPOINT))));
String endpoint = getOrDefaultAndRemove(otherProps, S3Properties.ENDPOINT, s3uri.getEndpoint().orElse(""));
if (Strings.isNullOrEmpty(endpoint)) {
throw new AnalysisException(String.format("Properties '%s' is required.", S3Properties.ENDPOINT));
}
if (!otherProps.containsKey(S3Properties.REGION)) {
String region = s3uri.getRegion().orElseThrow(() ->
new AnalysisException(String.format("Properties '%s' is required.", S3Properties.REGION)));
Expand Down Expand Up @@ -151,3 +153,4 @@ public String getTableName() {
return "S3TableValuedFunction";
}
}