Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
import org.apache.hadoop.hdds.tracing.TracingUtil;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.ozone.OmUtils;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.OzoneSecurityUtil;
Expand All @@ -47,7 +48,6 @@
import org.apache.hadoop.ozone.client.io.OzoneInputStream;
import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
import org.apache.hadoop.ozone.om.exceptions.OMException;
import org.apache.hadoop.ozone.om.ha.OMFailoverProxyProvider;
import org.apache.hadoop.ozone.om.helpers.BucketEncryptionKeyInfo;
import org.apache.hadoop.ozone.om.helpers.OmBucketArgs;
Expand Down Expand Up @@ -763,12 +763,7 @@ public void createS3Bucket(String userName, String s3BucketName)

Preconditions.checkArgument(Strings.isNotBlank(s3BucketName), "bucket " +
"name cannot be null or empty.");
try {
HddsClientUtils.verifyResourceName(s3BucketName);
} catch (IllegalArgumentException exception) {
throw new OMException("Invalid bucket name: " + s3BucketName,
OMException.ResultCodes.INVALID_BUCKET_NAME);
}
OmUtils.validateBucketName(s3BucketName);
ozoneManagerClient.createS3Bucket(userName, s3BucketName);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,10 @@
import org.apache.commons.compress.utils.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
import org.apache.hadoop.hdds.utils.db.DBCheckpoint;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.ozone.om.exceptions.OMException;
import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
import org.apache.hadoop.ozone.om.helpers.RepeatedOmKeyInfo;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
Expand Down Expand Up @@ -528,4 +530,30 @@ public static RepeatedOmKeyInfo prepareKeyForDelete(OmKeyInfo keyInfo,

return repeatedOmKeyInfo;
}

/**
 * Checks that {@code volumeName} is a valid DNS-style resource name.
 *
 * @param volumeName the volume name to validate
 * @throws OMException with result code {@code INVALID_VOLUME_NAME} if the
 *         name does not satisfy the DNS naming rules enforced by
 *         {@link HddsClientUtils#verifyResourceName(String)}
 */
public static void validateVolumeName(String volumeName) throws OMException {
  boolean wellFormed;
  try {
    HddsClientUtils.verifyResourceName(volumeName);
    wellFormed = true;
  } catch (IllegalArgumentException ignored) {
    // Translate the client-side validation failure into an OM error code.
    wellFormed = false;
  }
  if (!wellFormed) {
    throw new OMException("Invalid volume name: " + volumeName,
        OMException.ResultCodes.INVALID_VOLUME_NAME);
  }
}

/**
 * Checks that {@code bucketName} is a valid DNS-style resource name.
 *
 * @param bucketName the bucket name to validate
 * @throws OMException with result code {@code INVALID_BUCKET_NAME} if the
 *         name does not satisfy the DNS naming rules enforced by
 *         {@link HddsClientUtils#verifyResourceName(String)}
 */
public static void validateBucketName(String bucketName)
    throws OMException {
  try {
    HddsClientUtils.verifyResourceName(bucketName);
  } catch (IllegalArgumentException cause) {
    // Surface the failure as an OM-level error so callers see a
    // protocol result code rather than a client-side exception.
    String message = "Invalid bucket name: " + bucketName;
    throw new OMException(message,
        OMException.ResultCodes.INVALID_BUCKET_NAME);
  }
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -227,6 +227,8 @@ public enum ResultCodes {

DIRECTORY_ALREADY_EXISTS,

INVALID_VOLUME_NAME,

REPLAY // When ratis logs are replayed.
}
}
2 changes: 2 additions & 0 deletions hadoop-ozone/common/src/main/proto/OzoneManagerProtocol.proto
Original file line number Diff line number Diff line change
Expand Up @@ -315,6 +315,8 @@ enum Status {
CANNOT_CREATE_DIRECTORY_AT_ROOT = 59;
DIRECTORY_ALREADY_EXISTS = 60;

INVALID_VOLUME_NAME = 61;

// When transactions are replayed
REPLAY = 100;
}
Expand Down
2 changes: 1 addition & 1 deletion hadoop-ozone/dist/src/main/compose/ozone-mr/common-config
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ OZONE-SITE.XML_hdds.datanode.dir=/data/hdds
HDFS-SITE.XML_dfs.datanode.address=0.0.0.0:1019
HDFS-SITE.XML_dfs.datanode.http.address=0.0.0.0:1012

CORE-SITE.xml_fs.defaultFS=o3fs://bucket1.vol1/
CORE-SITE.xml_fs.defaultFS=o3fs://bucket1.volume1/

MAPRED-SITE.XML_mapreduce.framework.name=yarn
MAPRED-SITE.XML_yarn.app.mapreduce.am.env=HADOOP_MAPRED_HOME=$HADOOP_HOME
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
# limitations under the License.

CORE-SITE.XML_fs.o3fs.impl=org.apache.hadoop.fs.ozone.OzoneFileSystem
CORE-SITE.XML_fs.defaultFS=o3fs://bucket.volume.omservice
CORE-SITE.XML_fs.defaultFS=o3fs://bucket1.volume1.omservice
OZONE-SITE.XML_ozone.om.service.ids=omservice
OZONE-SITE.XML_ozone.om.nodes.omservice=om1,om2,om3
OZONE-SITE.XML_ozone.om.address.omservice.om1=om1
Expand Down
12 changes: 6 additions & 6 deletions hadoop-ozone/dist/src/main/compose/ozonesecure-mr/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -31,13 +31,13 @@ docker-compose exec om bash

kinit -kt /etc/security/keytabs/testuser.keytab testuser/om@EXAMPLE.COM

ozone sh volume create /vol1
ozone sh volume create /volume1

ozone sh bucket create /vol1/bucket1
ozone sh bucket create /volume1/bucket1

ozone sh key put /vol1/bucket1/key1 LICENSE.txt
ozone sh key put /volume1/bucket1/key1 LICENSE.txt

ozone fs -ls o3fs://bucket1.vol1/
ozone fs -ls o3fs://bucket1.volume1/
```

## Yarn Resource Manager Setup
Expand All @@ -57,7 +57,7 @@ hadoop fs -mkdir /user/hadoop

### WordCount
```
yarn jar $HADOOP_MAPRED_HOME/hadoop-mapreduce-examples-*.jar wordcount o3fs://bucket1.vol1/key1 o3fs://bucket1.vol1/key1.count
yarn jar $HADOOP_MAPRED_HOME/hadoop-mapreduce-examples-*.jar wordcount o3fs://bucket1.volume1/key1 o3fs://bucket1.volume1/key1.count

hadoop fs -cat /key1.count/part-r-00000
```
Expand All @@ -69,5 +69,5 @@ yarn jar $HADOOP_MAPRED_HOME/hadoop-mapreduce-examples-*.jar pi 10 100

### RandomWrite
```
yarn jar $HADOOP_MAPRED_HOME/hadoop-mapreduce-examples-*.jar randomwriter -Dtest.randomwrite.total_bytes=10000000 o3fs://bucket1.vol1/randomwrite.out
yarn jar $HADOOP_MAPRED_HOME/hadoop-mapreduce-examples-*.jar randomwriter -Dtest.randomwrite.total_bytes=10000000 o3fs://bucket1.volume1/randomwrite.out
```
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ HDFS-SITE.XML_rpc.metrics.quantile.enable=true
HDFS-SITE.XML_rpc.metrics.percentiles.intervals=60,300

CORE-SITE.XML_fs.AbstractFileSystem.o3fs.impl=org.apache.hadoop.fs.ozone.OzFs
CORE-SITE.XML_fs.defaultFS=o3fs://bucket1.vol1/
CORE-SITE.XML_fs.defaultFS=o3fs://bucket1.volume1/

MAPRED-SITE.XML_mapreduce.framework.name=yarn
MAPRED-SITE.XML_yarn.app.mapreduce.am.env=HADOOP_MAPRED_HOME=$HADOOP_HOME
Expand Down
2 changes: 1 addition & 1 deletion hadoop-ozone/dist/src/main/smoketest/createbucketenv.robot
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ Test Timeout 2 minute


*** Variables ***
${volume} vol1
${volume} volume1
${bucket} bucket1


Expand Down
4 changes: 2 additions & 2 deletions hadoop-ozone/dist/src/main/smoketest/createmrenv.robot
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ Test Timeout 2 minute


*** Variables ***
${volume} vol1
${volume} volume1
${bucket} bucket1


Expand All @@ -40,7 +40,7 @@ Create test volume, bucket and key
Run Keyword if "BUCKET_NOT_FOUND" in """${result}""" Create bucket
${result} = Execute ozone sh bucket info /${volume}/${bucket}
Should not contain ${result} NOT_FOUND
Execute ozone sh key put /vol1/bucket1/key1 LICENSE.txt
Execute ozone sh key put /volume1/bucket1/key1 LICENSE.txt

Create user dir for hadoop
Execute ozone fs -mkdir /user
Expand Down
4 changes: 2 additions & 2 deletions hadoop-ozone/dist/src/main/smoketest/mapreduce.robot
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ Test Timeout 4 minute


*** Variables ***
${volume} vol1
${volume} volume1
${bucket} bucket1
${hadoop.version} 3.2.0

Expand All @@ -33,5 +33,5 @@ Execute PI calculation

Execute WordCount
${random} Generate Random String 2 [NUMBERS]
${output} = Execute yarn jar ./share/hadoop/mapreduce/hadoop-mapreduce-examples-${hadoop.version}.jar wordcount o3fs://bucket1.vol1/key1 o3fs://bucket1.vol1/key1-${random}.count
${output} = Execute yarn jar ./share/hadoop/mapreduce/hadoop-mapreduce-examples-${hadoop.version}.jar wordcount o3fs://bucket1.volume1/key1 o3fs://bucket1.volume1/key1-${random}.count
Should Contain ${output} completed successfully
4 changes: 2 additions & 2 deletions hadoop-ozone/dist/src/main/smoketest/ozonefs/hadoopo3fs.robot
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,6 @@ ${PREFIX} ozone

Test hadoop dfs
${random} = Generate Random String 5 [NUMBERS]
${result} = Execute hdfs dfs -put /opt/hadoop/NOTICE.txt o3fs://bucket1.vol1/${PREFIX}-${random}
${result} = Execute hdfs dfs -ls o3fs://bucket1.vol1/
${result} = Execute hdfs dfs -put /opt/hadoop/NOTICE.txt o3fs://bucket1.volume1/${PREFIX}-${random}
${result} = Execute hdfs dfs -ls o3fs://bucket1.volume1/
Should contain ${result} ${PREFIX}-${random}
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@
import java.util.stream.Collectors;

import com.google.common.base.Optional;

import org.apache.hadoop.ozone.OmUtils;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.om.helpers.OmVolumeArgs;
import org.apache.hadoop.ozone.om.helpers.OzoneAclUtil;
Expand Down Expand Up @@ -88,6 +90,8 @@ public OMRequest preExecute(OzoneManager ozoneManager) throws IOException {
CreateBucketRequest createBucketRequest =
getOmRequest().getCreateBucketRequest();
BucketInfo bucketInfo = createBucketRequest.getBucketInfo();
// Verify resource name
OmUtils.validateBucketName(bucketInfo.getBucketName());

// Get KMS provider.
KeyProviderCryptoExtension kmsProvider =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;

import org.apache.hadoop.ozone.OmUtils;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.om.ratis.utils.OzoneManagerDoubleBufferHelper;
import org.apache.hadoop.ozone.om.request.file.OMFileRequest;
Expand Down Expand Up @@ -64,8 +66,6 @@
import org.apache.hadoop.hdds.utils.db.cache.CacheValue;

import static org.apache.hadoop.ozone.OzoneConsts.OM_S3_VOLUME_PREFIX;
import static org.apache.hadoop.ozone.OzoneConsts.S3_BUCKET_MAX_LENGTH;
import static org.apache.hadoop.ozone.OzoneConsts.S3_BUCKET_MIN_LENGTH;
import static org.apache.hadoop.ozone.om.lock.OzoneManagerLock.Resource.BUCKET_LOCK;
import static org.apache.hadoop.ozone.om.lock.OzoneManagerLock.Resource.S3_BUCKET_LOCK;
import static org.apache.hadoop.ozone.om.lock.OzoneManagerLock.Resource.USER_LOCK;
Expand Down Expand Up @@ -98,14 +98,7 @@ public OMRequest preExecute(OzoneManager ozoneManager) throws IOException {
// TODO: Do we need to enforce the bucket rules in this code path?
// https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html

// For now only checked the length.
int bucketLength = s3CreateBucketRequest.getS3Bucketname().length();
if (bucketLength < S3_BUCKET_MIN_LENGTH ||
bucketLength >= S3_BUCKET_MAX_LENGTH) {
throw new OMException("S3BucketName must be at least 3 and not more " +
"than 63 characters long",
OMException.ResultCodes.S3_BUCKET_INVALID_LENGTH);
}
OmUtils.validateBucketName(s3CreateBucketRequest.getS3Bucketname());

return getOmRequest().toBuilder()
.setCreateS3BucketRequest(newS3CreateBucketRequest)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@
import java.util.Map;

import com.google.common.base.Preconditions;

import org.apache.hadoop.ozone.OmUtils;
import org.apache.hadoop.ozone.om.ratis.utils.OzoneManagerDoubleBufferHelper;
import org.apache.hadoop.ozone.om.request.file.OMFileRequest;
import org.slf4j.Logger;
Expand Down Expand Up @@ -71,17 +73,17 @@ public OMRequest preExecute(OzoneManager ozoneManager) throws IOException {

VolumeInfo volumeInfo =
getOmRequest().getCreateVolumeRequest().getVolumeInfo();
// Verify resource name
OmUtils.validateVolumeName(volumeInfo.getVolume());

// Set creation time
VolumeInfo updatedVolumeInfo =
volumeInfo.toBuilder().setCreationTime(Time.now()).build();


return getOmRequest().toBuilder().setCreateVolumeRequest(
CreateVolumeRequest.newBuilder().setVolumeInfo(updatedVolumeInfo))
.setUserInfo(getUserInfo())
.build();

}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@

import java.util.UUID;

import org.apache.hadoop.ozone.om.exceptions.OMException;
import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Assert;
import org.junit.Test;

Expand Down Expand Up @@ -48,6 +50,9 @@ public void testPreExecute() throws Exception {
String volumeName = UUID.randomUUID().toString();
String bucketName = UUID.randomUUID().toString();
doPreExecute(volumeName, bucketName);
// Verify invalid bucket name throws exception
LambdaTestUtils.intercept(OMException.class, "Invalid bucket name: b1",
() -> doPreExecute("volume1", "b1"));
}


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ public void testPreExecuteInvalidBucketLength() throws Exception {
doPreExecute(userName, s3BucketName);
fail("testPreExecuteInvalidBucketLength failed");
} catch (OMException ex) {
GenericTestUtils.assertExceptionContains("S3_BUCKET_INVALID_LENGTH", ex);
GenericTestUtils.assertExceptionContains("INVALID_BUCKET_NAME", ex);
}

// set bucket name which is greater than 63 characters length
Expand All @@ -70,7 +70,7 @@ public void testPreExecuteInvalidBucketLength() throws Exception {
doPreExecute(userName, s3BucketName);
fail("testPreExecuteInvalidBucketLength failed");
} catch (OMException ex) {
GenericTestUtils.assertExceptionContains("S3_BUCKET_INVALID_LENGTH", ex);
GenericTestUtils.assertExceptionContains("INVALID_BUCKET_NAME", ex);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,11 @@

import java.util.UUID;

import org.apache.hadoop.ozone.om.exceptions.OMException;
import org.apache.hadoop.ozone.om.request.file.OMFileRequest;
import org.apache.hadoop.ozone.om.response.volume.OMVolumeCreateResponse;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Assert;
import org.junit.Test;

Expand All @@ -49,6 +51,9 @@ public void testPreExecute() throws Exception {
String adminName = UUID.randomUUID().toString();
String ownerName = UUID.randomUUID().toString();
doPreExecute(volumeName, adminName, ownerName);
// Verify exception thrown on invalid volume name
LambdaTestUtils.intercept(OMException.class, "Invalid volume name: v1",
() -> doPreExecute("v1", adminName, ownerName));
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,7 @@ public void init() {
attemptCounter = new AtomicLong(0);

if (prefix.length() == 0) {
prefix = RandomStringUtils.randomAlphanumeric(10);
prefix = RandomStringUtils.randomAlphanumeric(10).toLowerCase();
} else {
//replace environment variables to support multi-node execution
prefix = resolvePrefix(prefix);
Expand Down