From 5d61f6930585b1a8effb46a19fc2edaac9e2c026 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Elek=20M=C3=A1rton?= Date: Fri, 23 Apr 2021 14:07:39 +0200 Subject: [PATCH 1/3] HDDS-5145. Extend Pipeline/ReplicationConfig refactor with ECReplicationConfig --- .../hdds/client/ECReplicationConfig.java | 8 ++ .../hadoop/hdds/scm/pipeline/Pipeline.java | 78 ++++++++++--------- .../hdds/scm/pipeline/MockPipeline.java | 19 +++++ .../hdds/scm/pipeline/TestPipeline.java | 19 +++++ .../src/main/proto/hdds.proto | 1 + ...ocationProtocolServerSideTranslatorPB.java | 3 +- 6 files changed, 90 insertions(+), 38 deletions(-) diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ECReplicationConfig.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ECReplicationConfig.java index 1d9da50a2ac1..5e3d934d3214 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ECReplicationConfig.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ECReplicationConfig.java @@ -91,4 +91,12 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(data, parity); } + + public static HddsProtos.ECReplicationConfig getProtoOrNull( + ReplicationConfig replicationConfig) { + if (replicationConfig instanceof ECReplicationConfig) { + return ((ECReplicationConfig) replicationConfig).toProto(); + } + return null; + } } diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java index 90a6d94bea18..26137ab4563a 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java @@ -278,6 +278,44 @@ public ReplicationConfig getReplicationConfig() { return replicationConfig; } + public static Pipeline getFromProtobuf(HddsProtos.Pipeline pipeline) + throws UnknownPipelineStateException 
{ + Preconditions.checkNotNull(pipeline, "Pipeline is null"); + + List nodes = new ArrayList<>(); + for (DatanodeDetailsProto member : pipeline.getMembersList()) { + nodes.add(DatanodeDetails.getFromProtoBuf(member)); + } + UUID leaderId = null; + if (pipeline.hasLeaderID128()) { + HddsProtos.UUID uuid = pipeline.getLeaderID128(); + leaderId = new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits()); + } else if (pipeline.hasLeaderID() && + StringUtils.isNotEmpty(pipeline.getLeaderID())) { + leaderId = UUID.fromString(pipeline.getLeaderID()); + } + + UUID suggestedLeaderId = null; + if (pipeline.hasSuggestedLeaderID()) { + HddsProtos.UUID uuid = pipeline.getSuggestedLeaderID(); + suggestedLeaderId = + new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits()); + } + + final ReplicationConfig config = ReplicationConfig + .fromProto(pipeline.getType(), pipeline.getFactor(), + pipeline.getEcReplicationConfig()); + return new Builder().setId(PipelineID.getFromProtobuf(pipeline.getId())) + .setReplicationConfig(config) + .setState(PipelineState.fromProtobuf(pipeline.getState())) + .setNodes(nodes) + .setLeaderId(leaderId) + .setSuggestedLeaderId(suggestedLeaderId) + .setNodesInOrder(pipeline.getMemberOrdersList()) + .setCreateTimestamp(pipeline.getCreationTimeStamp()) + .build(); + } + public HddsProtos.Pipeline getProtobufMessage(int clientVersion) throws UnknownPipelineStateException { @@ -293,6 +331,9 @@ public HddsProtos.Pipeline getProtobufMessage(int clientVersion) .setId(id.getProtobuf()) .setType(replicationConfig.getReplicationType()) .setFactor(ReplicationConfig.getLegacyFactor(replicationConfig)) + .setEcReplicationConfig( + org.apache.hadoop.hdds.client.ECReplicationConfig + .getProtoOrNull(replicationConfig)) .setState(PipelineState.getProtobuf(state)) .setLeaderID(leaderId != null ? 
leaderId.toString() : "") .setCreationTimeStamp(creationTimestamp.toEpochMilli()) @@ -335,43 +376,6 @@ public HddsProtos.Pipeline getProtobufMessage(int clientVersion) return builder.build(); } - public static Pipeline getFromProtobuf(HddsProtos.Pipeline pipeline) - throws UnknownPipelineStateException { - Preconditions.checkNotNull(pipeline, "Pipeline is null"); - - List nodes = new ArrayList<>(); - for (DatanodeDetailsProto member : pipeline.getMembersList()) { - nodes.add(DatanodeDetails.getFromProtoBuf(member)); - } - UUID leaderId = null; - if (pipeline.hasLeaderID128()) { - HddsProtos.UUID uuid = pipeline.getLeaderID128(); - leaderId = new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits()); - } else if (pipeline.hasLeaderID() && - StringUtils.isNotEmpty(pipeline.getLeaderID())) { - leaderId = UUID.fromString(pipeline.getLeaderID()); - } - - UUID suggestedLeaderId = null; - if (pipeline.hasSuggestedLeaderID()) { - HddsProtos.UUID uuid = pipeline.getSuggestedLeaderID(); - suggestedLeaderId = - new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits()); - } - - final ReplicationConfig config = ReplicationConfig - .fromProto(pipeline.getType(), pipeline.getFactor()); - return new Builder().setId(PipelineID.getFromProtobuf(pipeline.getId())) - .setReplicationConfig(config) - .setState(PipelineState.fromProtobuf(pipeline.getState())) - .setNodes(nodes) - .setLeaderId(leaderId) - .setSuggestedLeaderId(suggestedLeaderId) - .setNodesInOrder(pipeline.getMemberOrdersList()) - .setCreateTimestamp(pipeline.getCreationTimeStamp()) - .build(); - } - @Override public boolean equals(Object o) { if (this == o) { diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/MockPipeline.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/MockPipeline.java index b7b3dc6340d9..1212b4728154 100644 --- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/MockPipeline.java +++ 
b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/MockPipeline.java @@ -22,6 +22,7 @@ import java.util.List; import java.util.Objects; +import org.apache.hadoop.hdds.client.ECReplicationConfig; import org.apache.hadoop.hdds.client.RatisReplicationConfig; import org.apache.hadoop.hdds.client.StandaloneReplicationConfig; import org.apache.hadoop.hdds.protocol.DatanodeDetails; @@ -87,6 +88,24 @@ public static Pipeline createRatisPipeline() { .build(); } + public static Pipeline createEcPipeline() { + + List nodes = new ArrayList<>(); + nodes.add(MockDatanodeDetails.randomDatanodeDetails()); + nodes.add(MockDatanodeDetails.randomDatanodeDetails()); + nodes.add(MockDatanodeDetails.randomDatanodeDetails()); + nodes.add(MockDatanodeDetails.randomDatanodeDetails()); + nodes.add(MockDatanodeDetails.randomDatanodeDetails()); + + return Pipeline.newBuilder() + .setState(Pipeline.PipelineState.OPEN) + .setId(PipelineID.randomId()) + .setReplicationConfig( + new ECReplicationConfig(3, 2)) + .setNodes(nodes) + .build(); + } + private MockPipeline() { throw new UnsupportedOperationException("no instances"); } diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java index 504f9495def2..94999bd0d64e 100644 --- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java +++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hdds.scm.pipeline; import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.junit.Assert; import org.junit.Test; import java.io.IOException; @@ -48,4 +49,22 @@ public void protoIncludesNewPortsOnlyForV1() throws IOException { assertPorts(dn, ALL_PORTS); } } + + @Test + public void getProtobufMessageEC() throws IOException { + Pipeline subject = MockPipeline.createPipeline(3); + + //when EC 
config is empty + HddsProtos.Pipeline protobufMessage = subject.getProtobufMessage(1); + Assert.assertNull(protobufMessage); + + subject = MockPipeline.createEcPipeline(); + + //when EC config is empty + protobufMessage = subject.getProtobufMessage(1); + Assert.assertEquals(3, protobufMessage.getEcReplicationConfig().getData()); + Assert + .assertEquals(2, protobufMessage.getEcReplicationConfig().getParity()); + + } } diff --git a/hadoop-hdds/interface-client/src/main/proto/hdds.proto b/hadoop-hdds/interface-client/src/main/proto/hdds.proto index dbbb284debf7..ba8f916c4f9f 100644 --- a/hadoop-hdds/interface-client/src/main/proto/hdds.proto +++ b/hadoop-hdds/interface-client/src/main/proto/hdds.proto @@ -114,6 +114,7 @@ message Pipeline { optional uint64 creationTimeStamp = 8; optional UUID suggestedLeaderID = 9; repeated uint32 memberReplicaIndexes = 10; + optional ECReplicationConfig ecReplicationConfig = 11; // TODO(runzhiwang): when leaderID is gone, specify 6 as the index of leaderID128 optional UUID leaderID128 = 100; } diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocolServerSideTranslatorPB.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocolServerSideTranslatorPB.java index 950e6da925e4..aaa3db329b31 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocolServerSideTranslatorPB.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocolServerSideTranslatorPB.java @@ -186,7 +186,8 @@ public AllocateScmBlockResponseProto allocateScmBlock( request.getNumBlocks(), ReplicationConfig.fromProto( request.getType(), - request.getFactor()), + request.getFactor(), + request.getEcReplicationConfig()), request.getOwner(), ExcludeList.getFromProtoBuf(request.getExcludeList())); From f96f8e9352f8b0be27a8a6a3cdf9c2d47400a764 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Elek=20M=C3=A1rton?= Date: Mon, 3 May 2021 10:19:44 +0200 Subject: [PATCH 2/3] retrigger build From 287371e999278cce2b363f3afe0645343a8dfcf9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Elek=20M=C3=A1rton?= Date: Mon, 3 May 2021 13:49:25 +0200 Subject: [PATCH 3/3] fix unit test --- .../hadoop/hdds/client/ECReplicationConfig.java | 7 ------- .../org/apache/hadoop/hdds/scm/pipeline/Pipeline.java | 11 +++++++---- .../apache/hadoop/hdds/scm/pipeline/TestPipeline.java | 7 ++++--- 3 files changed, 11 insertions(+), 14 deletions(-) diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ECReplicationConfig.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ECReplicationConfig.java index 5e3d934d3214..5b7d03384803 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ECReplicationConfig.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/client/ECReplicationConfig.java @@ -92,11 +92,4 @@ public int hashCode() { return Objects.hash(data, parity); } - public static HddsProtos.ECReplicationConfig getProtoOrNull( - ReplicationConfig replicationConfig) { - if (replicationConfig instanceof ECReplicationConfig) { - return ((ECReplicationConfig) replicationConfig).toProto(); - } - return null; - } } diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java index 26137ab4563a..c440b6494efc 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java @@ -34,6 +34,7 @@ import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.apache.hadoop.hdds.client.ECReplicationConfig; import org.apache.hadoop.hdds.client.ReplicationConfig; import 
org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.hdds.protocol.proto.HddsProtos; @@ -330,16 +331,18 @@ public HddsProtos.Pipeline getProtobufMessage(int clientVersion) HddsProtos.Pipeline.Builder builder = HddsProtos.Pipeline.newBuilder() .setId(id.getProtobuf()) .setType(replicationConfig.getReplicationType()) - .setFactor(ReplicationConfig.getLegacyFactor(replicationConfig)) - .setEcReplicationConfig( - org.apache.hadoop.hdds.client.ECReplicationConfig - .getProtoOrNull(replicationConfig)) .setState(PipelineState.getProtobuf(state)) .setLeaderID(leaderId != null ? leaderId.toString() : "") .setCreationTimeStamp(creationTimestamp.toEpochMilli()) .addAllMembers(members) .addAllMemberReplicaIndexes(memberReplicaIndexes); + if (replicationConfig instanceof ECReplicationConfig) { + builder.setEcReplicationConfig(((ECReplicationConfig) replicationConfig) + .toProto()); + } else { + builder.setFactor(ReplicationConfig.getLegacyFactor(replicationConfig)); + } if (leaderId != null) { HddsProtos.UUID uuid128 = HddsProtos.UUID.newBuilder() .setMostSigBits(leaderId.getMostSignificantBits()) diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java index 94999bd0d64e..2cf3bf08a90d 100644 --- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java +++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestPipeline.java @@ -54,13 +54,14 @@ public void protoIncludesNewPortsOnlyForV1() throws IOException { public void getProtobufMessageEC() throws IOException { Pipeline subject = MockPipeline.createPipeline(3); - //when EC config is empty + //when EC config is empty/null HddsProtos.Pipeline protobufMessage = subject.getProtobufMessage(1); - Assert.assertNull(protobufMessage); + Assert.assertEquals(0, protobufMessage.getEcReplicationConfig().getData()); + + //when EC config 
is NOT empty subject = MockPipeline.createEcPipeline(); - //when EC config is empty protobufMessage = subject.getProtobufMessage(1); Assert.assertEquals(3, protobufMessage.getEcReplicationConfig().getData()); Assert