From caf60fdb42bba58ecdfd8f14b672915ecbe62373 Mon Sep 17 00:00:00 2001 From: Istvan Fajth Date: Fri, 10 Jul 2020 17:29:14 +0200 Subject: [PATCH 01/16] HDDS-3925. Add first implementation that uses the approach that removes via the iterator. --- .../hdds/utils/db/RDBStoreIterator.java | 20 +++++++++++ .../apache/hadoop/hdds/utils/db/RDBTable.java | 2 +- .../hadoop/hdds/utils/db/TypedTable.java | 5 +++ .../hdds/scm/metadata/PipelineIDCodec.java | 36 +++++++++++++++++-- .../hdds/scm/pipeline/SCMPipelineManager.java | 23 +++++++++++- 5 files changed, 82 insertions(+), 4 deletions(-) diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreIterator.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreIterator.java index 784738b0cec2..e028edc52fcb 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreIterator.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreIterator.java @@ -32,12 +32,18 @@ public class RDBStoreIterator implements TableIterator { private RocksIterator rocksDBIterator; + private RDBTable rocksDBTable; public RDBStoreIterator(RocksIterator iterator) { this.rocksDBIterator = iterator; rocksDBIterator.seekToFirst(); } + public RDBStoreIterator(RocksIterator iterator, RDBTable table) { + this(iterator); + this.rocksDBTable = table; + } + @Override public void forEachRemaining( Consumer action) { @@ -100,6 +106,20 @@ public ByteArrayKeyValue value() { return null; } + @Override + public void remove() { + if (rocksDBTable == null) { + throw new UnsupportedOperationException("remove"); + } + if (rocksDBIterator.isValid()) { + try { + rocksDBTable.delete(rocksDBIterator.key()); + } catch (IOException e) { + throw new RuntimeException("Failed to delete key from RocksDB.", e); + } + } + } + @Override public void close() throws IOException { rocksDBIterator.close(); diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java index 2e390e2362d1..4dbb59ad4412 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBTable.java @@ -206,7 +206,7 @@ public void deleteWithBatch(BatchOperation batch, byte[] key) public TableIterator iterator() { ReadOptions readOptions = new ReadOptions(); readOptions.setFillCache(false); - return new RDBStoreIterator(db.newIterator(handle, readOptions)); + return new RDBStoreIterator(db.newIterator(handle, readOptions), this); } @Override diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java index 86d23afb9318..beade05d887e 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java @@ -420,5 +420,10 @@ public TypedKeyValue next() { return new TypedKeyValue(rawIterator.next(), keyType, valueType); } + + @Override + public void remove() { + rawIterator.remove(); + } } } diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/PipelineIDCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/PipelineIDCodec.java index d661e3467b2c..67968d5e5980 100644 --- 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/PipelineIDCodec.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/PipelineIDCodec.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hdds.scm.metadata; import java.io.IOException; +import java.util.UUID; import org.apache.hadoop.hdds.scm.pipeline.PipelineID; import org.apache.hadoop.hdds.utils.db.Codec; @@ -30,12 +31,43 @@ public class PipelineIDCodec implements Codec { @Override public byte[] toPersistedFormat(PipelineID object) throws IOException { - return object.getProtobuf().toByteArray(); + byte[] bytes = new byte[16]; + System.arraycopy( + asByteArray(object.getId().getLeastSignificantBits()), 0, bytes, 8, 8); + System.arraycopy( + asByteArray(object.getId().getMostSignificantBits()), 0, bytes, 0, 8); + return bytes; + } + + private byte[] asByteArray(long bits) { + byte[] bytes = new byte[8]; + for (int i = 0; i < 8; i++) { + bytes[i] = (byte) (bits >> (i * 8) & 0x00000000000000FF); + } + return bytes; } @Override public PipelineID fromPersistedFormat(byte[] rawData) throws IOException { - return null; + if (rawData.length!=16) { + throw new IllegalArgumentException("Invalid key in DB."); + } + long leastSignificantBits = toLong(rawData, 8); + long mostSiginificantBits = toLong(rawData, 0); + + UUID id = new UUID(mostSiginificantBits, leastSignificantBits); + return PipelineID.valueOf(id); + } + + private long toLong(byte[] arr, int startIdx) { + if (arr.length < startIdx + 8) { + throw new ArrayIndexOutOfBoundsException(); + } + long val = 0x0000000000000000L; + for (int i=7; i>=0; i--) { + val |= ((long) arr[i+startIdx]) << i * 8; + } + return val; } @Override diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SCMPipelineManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SCMPipelineManager.java index e8223ca50455..f5c7de89e60a 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SCMPipelineManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SCMPipelineManager.java @@ -161,12 +161,33 @@ protected void initializePipelineState() throws IOException { TableIterator> iterator = pipelineStore.iterator(); while (iterator.hasNext()) { - Pipeline pipeline = iterator.next().getValue(); + Pipeline pipeline = nextPipelineFromIterator(iterator); stateManager.addPipeline(pipeline); nodeManager.addPipeline(pipeline); } } + private Pipeline nextPipelineFromIterator( + TableIterator> it + ) throws IOException { + KeyValue actual = it.next(); + Pipeline pipeline = actual.getValue(); + PipelineID pipelineID = actual.getKey(); + checkKeyAndReplaceIfObsolete(it, pipeline, pipelineID); + return pipeline; + } + + private void checkKeyAndReplaceIfObsolete( + TableIterator> it, + Pipeline pipeline, + PipelineID pipelineID + ) throws IOException { + if (!pipelineID.equals(pipeline.getId())) { + it.remove(); + pipelineStore.put(pipeline.getId(), pipeline); + } + } + private void recordMetricsForPipeline(Pipeline pipeline) { metrics.incNumPipelineAllocated(); if (pipeline.isOpen()) { From 9f989cbbb836296c2aa6d0ae7420cbda2f755087 Mon Sep 17 00:00:00 2001 From: Istvan Fajth Date: Fri, 10 Jul 2020 20:30:27 +0200 Subject: [PATCH 02/16] HDDS-3925. Addressing comments from internal review and design discussion. 
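For reference, the caller-side pattern that the iterator-level delete enables can be condensed as in the sketch below. This is illustrative only and not part of the change; the helper name rewriteOldFormatKeys is made up, while Table, TableIterator, Table.KeyValue and removeFromDB() are the interfaces touched by these patches.

void rewriteOldFormatKeys(Table<PipelineID, Pipeline> pipelineStore)
    throws IOException {
  try (TableIterator<PipelineID,
           ? extends Table.KeyValue<PipelineID, Pipeline>> it =
           pipelineStore.iterator()) {
    while (it.hasNext()) {
      Table.KeyValue<PipelineID, Pipeline> kv = it.next();
      Pipeline pipeline = kv.getValue();
      if (!kv.getKey().equals(pipeline.getId())) {
        // The stored key does not match the pipeline's own ID, so drop the
        // entry through the iterator and re-insert it under the new key.
        it.removeFromDB();
        pipelineStore.put(pipeline.getId(), pipeline);
      }
    }
  }
}
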
--- .../hdds/utils/db/RDBStoreIterator.java | 8 +--- .../hadoop/hdds/utils/db/TableIterator.java | 2 + .../hadoop/hdds/utils/db/TypedTable.java | 4 +- .../hdds/scm/pipeline/SCMPipelineManager.java | 37 +++++++++++++++++-- 4 files changed, 40 insertions(+), 11 deletions(-) diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreIterator.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreIterator.java index e028edc52fcb..5902486ec6ee 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreIterator.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreIterator.java @@ -107,16 +107,12 @@ public ByteArrayKeyValue value() { } @Override - public void remove() { + public void removeFromDB() throws IOException { if (rocksDBTable == null) { throw new UnsupportedOperationException("remove"); } if (rocksDBIterator.isValid()) { - try { - rocksDBTable.delete(rocksDBIterator.key()); - } catch (IOException e) { - throw new RuntimeException("Failed to delete key from RocksDB.", e); - } + rocksDBTable.delete(rocksDBIterator.key()); } } diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java index a684157a43b1..1967e5a538b4 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java @@ -60,4 +60,6 @@ public interface TableIterator extends Iterator, Closeable { */ T value(); + void removeFromDB() throws IOException; + } diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java index beade05d887e..1451946f30dc 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java @@ -422,8 +422,8 @@ public TypedKeyValue next() { } @Override - public void remove() { - rawIterator.remove(); + public void removeFromDB() throws IOException { + rawIterator.removeFromDB(); } } } diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SCMPipelineManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SCMPipelineManager.java index f5c7de89e60a..fda937134c1c 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SCMPipelineManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/SCMPipelineManager.java @@ -177,14 +177,45 @@ private Pipeline nextPipelineFromIterator( return pipeline; } + /** + * This method is part of the change that happens in HDDS-3925, and we can + * and should remove this on later on. + * The purpose of the change is to get rid of protobuf serialization in the + * SCM database Pipeline table keys. The keys are not used anywhere, and the + * PipelineID that is used as a key is in the value as well, so we can detect + * a change in the key translation to byte[] and if we have the old format + * we refresh the table contents during SCM startup. 
+ * + * If this fails in the remove, then there is an IOException coming from + * RocksDB itself, in this case in memory structures will still be fine and + * SCM should be operational, however we will attempt to replace the old key + * at next startup. In this case removing of the pipeline will leave the + * pipeline in RocksDB, and during next startup we will attempt to delete it + * again. This does not affect any runtime operations. + * If a Pipeline should have been deleted but remained in RocksDB, then at + * next startup it will be replaced and added with the new key, then SCM will + * detect that it is an invalid Pipeline and successfully delete it with the + * new key. + * For further info check the JIRA. + * + * @param it the iterator used to iterate the Pipeline table + * @param pipeline the pipeline read already from the iterator + * @param pipelineID the pipeline ID read from the raw data via the iterator + */ private void checkKeyAndReplaceIfObsolete( TableIterator> it, Pipeline pipeline, PipelineID pipelineID - ) throws IOException { + ) { if (!pipelineID.equals(pipeline.getId())) { - it.remove(); - pipelineStore.put(pipeline.getId(), pipeline); + try { + it.removeFromDB(); + pipelineStore.put(pipeline.getId(), pipeline); + } catch (IOException e) { + LOG.info("Pipeline table in RocksDB has an old key format, and " + + "removing the pipeline with the old key was unsuccessful." + + "Pipeline: {}", pipeline); + } } } From 4fb4dc0bbb1464b3a40a6e30d7998f403807694b Mon Sep 17 00:00:00 2001 From: Istvan Fajth Date: Sat, 11 Jul 2020 00:26:34 +0200 Subject: [PATCH 03/16] Add tests to the codec conversion methods, and change the conversion approach. --- .../hdds/scm/metadata/PipelineIDCodec.java | 38 +++--- .../scm/metadata/TestPipelineIDCodec.java | 123 ++++++++++++++++++ 2 files changed, 143 insertions(+), 18 deletions(-) create mode 100644 hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestPipelineIDCodec.java diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/PipelineIDCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/PipelineIDCodec.java index 67968d5e5980..e73539f70fc6 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/PipelineIDCodec.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/metadata/PipelineIDCodec.java @@ -19,6 +19,8 @@ package org.apache.hadoop.hdds.scm.metadata; import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Arrays; import java.util.UUID; import org.apache.hadoop.hdds.scm.pipeline.PipelineID; @@ -32,42 +34,42 @@ public class PipelineIDCodec implements Codec { @Override public byte[] toPersistedFormat(PipelineID object) throws IOException { byte[] bytes = new byte[16]; - System.arraycopy( - asByteArray(object.getId().getLeastSignificantBits()), 0, bytes, 8, 8); System.arraycopy( asByteArray(object.getId().getMostSignificantBits()), 0, bytes, 0, 8); + System.arraycopy( + asByteArray(object.getId().getLeastSignificantBits()), 0, bytes, 8, 8); return bytes; } private byte[] asByteArray(long bits) { - byte[] bytes = new byte[8]; - for (int i = 0; i < 8; i++) { - bytes[i] = (byte) (bits >> (i * 8) & 0x00000000000000FF); - } - return bytes; + ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES); + buffer.putLong(bits); + return buffer.array(); } @Override public PipelineID fromPersistedFormat(byte[] rawData) throws IOException { - if (rawData.length!=16) { - throw new 
IllegalArgumentException("Invalid key in DB."); - } - long leastSignificantBits = toLong(rawData, 8); long mostSiginificantBits = toLong(rawData, 0); + long leastSignificantBits = toLong(rawData, 8); UUID id = new UUID(mostSiginificantBits, leastSignificantBits); return PipelineID.valueOf(id); } - private long toLong(byte[] arr, int startIdx) { + private long toLong(byte[] arr, int startIdx) throws IOException { if (arr.length < startIdx + 8) { - throw new ArrayIndexOutOfBoundsException(); - } - long val = 0x0000000000000000L; - for (int i=7; i>=0; i--) { - val |= ((long) arr[i+startIdx]) << i * 8; + throw new IOException("Key conversion error.", + new ArrayIndexOutOfBoundsException( + "Key does not have the least expected amount of bytes," + + "and does not contain a UUID. Key: " + + Arrays.toString(arr) + ) + ); } - return val; + ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES); + buffer.put(arr, startIdx, 8); + buffer.flip(); + return buffer.getLong(); } @Override diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestPipelineIDCodec.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestPipelineIDCodec.java new file mode 100644 index 000000000000..a7f0aeae35c9 --- /dev/null +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestPipelineIDCodec.java @@ -0,0 +1,123 @@ +package org.apache.hadoop.hdds.scm.metadata; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +import org.apache.hadoop.hdds.scm.pipeline.PipelineID; +import org.junit.Test; + +import java.util.UUID; + +public class TestPipelineIDCodec { + + @Test + public void testPersistingZeroAsUUID() throws Exception { + long leastSigBits = 0x0000_0000_0000_0000l; + long mostSigBits = 0x0000_0000_0000_0000l; + byte[] expected = new byte[] { + b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), + b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00) + }; + + checkPersisting(leastSigBits, mostSigBits, expected); + } + + @Test + public void testPersistingFFAsUUID() throws Exception { + long leastSigBits = 0xFFFF_FFFF_FFFF_FFFFL; + long mostSigBits = 0xFFFF_FFFF_FFFF_FFFFL; + byte[] expected = new byte[] { + b(0xFF), b(0xFF), b(0xFF), b(0xFF), b(0xFF), b(0xFF), b(0xFF), b(0xFF), + b(0xFF), b(0xFF), b(0xFF), b(0xFF), b(0xFF), b(0xFF), b(0xFF), b(0xFF) + }; + + checkPersisting(leastSigBits, mostSigBits, expected); + } + + @Test + public void testPersistingARandomUUID() throws Exception { + for (int i=0; i<100; i++) { + UUID uuid = UUID.randomUUID(); + + long mask = 0x0000_0000_0000_00FFL; + + byte[] expected = new byte[] { + b(((int) (uuid.getMostSignificantBits() >> 56 & mask))), + b(((int) (uuid.getMostSignificantBits() >> 48 & mask))), + b(((int) (uuid.getMostSignificantBits() >> 40 & mask))), + b(((int) (uuid.getMostSignificantBits() >> 32 & mask))), + b(((int) (uuid.getMostSignificantBits() >> 24 & mask))), + b(((int) (uuid.getMostSignificantBits() >> 16 & mask))), + b(((int) (uuid.getMostSignificantBits() >> 8 & mask))), + b(((int) (uuid.getMostSignificantBits() & mask))), + + b(((int) (uuid.getLeastSignificantBits() >> 56 & mask))), + b(((int) (uuid.getLeastSignificantBits() >> 48 & mask))), + b(((int) (uuid.getLeastSignificantBits() >> 40 & mask))), + b(((int) (uuid.getLeastSignificantBits() >> 32 & mask))), + b(((int) (uuid.getLeastSignificantBits() >> 24 & mask))), + b(((int) (uuid.getLeastSignificantBits() >> 16 & mask))), + b(((int) 
(uuid.getLeastSignificantBits() >> 8 & mask))), + b(((int) (uuid.getLeastSignificantBits() & mask))), + }; + + checkPersisting( + uuid.getMostSignificantBits(), + uuid.getLeastSignificantBits(), + expected + ); + } + } + + @Test + public void testConvertAndReadBackZeroAsUUID() throws Exception { + long mostSigBits = 0x0000_0000_0000_0000L; + long leastSigBits = 0x0000_0000_0000_0000L; + UUID uuid = new UUID(mostSigBits, leastSigBits); + PipelineID pid = PipelineID.valueOf(uuid); + + byte[] encoded = new PipelineIDCodec().toPersistedFormat(pid); + PipelineID decoded = new PipelineIDCodec().fromPersistedFormat(encoded); + + assertEquals(pid, decoded); + } + + @Test + public void testConvertAndReadBackFFAsUUID() throws Exception { + long mostSigBits = 0xFFFF_FFFF_FFFF_FFFFL; + long leastSigBits = 0xFFFF_FFFF_FFFF_FFFFL; + UUID uuid = new UUID(mostSigBits, leastSigBits); + PipelineID pid = PipelineID.valueOf(uuid); + + byte[] encoded = new PipelineIDCodec().toPersistedFormat(pid); + PipelineID decoded = new PipelineIDCodec().fromPersistedFormat(encoded); + + assertEquals(pid, decoded); + } + + @Test + public void testConvertAndReadBackRandomUUID() throws Exception { + UUID uuid = UUID.randomUUID(); + PipelineID pid = PipelineID.valueOf(uuid); + + byte[] encoded = new PipelineIDCodec().toPersistedFormat(pid); + PipelineID decoded = new PipelineIDCodec().fromPersistedFormat(encoded); + + assertEquals(pid, decoded); + } + + private void checkPersisting( + long mostSigBits, long leastSigBits, byte[] expected + ) throws Exception { + UUID uuid = new UUID(mostSigBits, leastSigBits); + PipelineID pid = PipelineID.valueOf(uuid); + + byte[] encoded = new PipelineIDCodec().toPersistedFormat(pid); + + assertArrayEquals(expected, encoded); + } + + private byte b(int i) { + return (byte) (i & 0x0000_00FF); + } +} From d5838fa37f6b0c3a5ba76679e24a94710d01d616 Mon Sep 17 00:00:00 2001 From: Istvan Fajth Date: Sat, 11 Jul 2020 03:18:23 +0200 Subject: [PATCH 04/16] HDDs-3925. Added test to changes in SCMPipelineManager. 
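The codec tests above all assume the same 16-byte, big-endian key layout: the most significant long of the UUID first, then the least significant long. A minimal sketch of that layout and its round trip, using java.nio.ByteBuffer (which writes longs big-endian by default) and java.util.UUID; the helper names are illustrative only.

// Illustrative only: equivalent of what PipelineIDCodec does per 8-byte half.
static byte[] uuidToKey(UUID id) {
  ByteBuffer buf = ByteBuffer.allocate(16);
  buf.putLong(id.getMostSignificantBits());   // bytes 0..7
  buf.putLong(id.getLeastSignificantBits());  // bytes 8..15
  return buf.array();
}

static UUID keyToUuid(byte[] key) {
  ByteBuffer buf = ByteBuffer.wrap(key);
  return new UUID(buf.getLong(), buf.getLong());  // MSB read first, then LSB
}
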
--- .../scm/pipeline/TestSCMPipelineManager.java | 117 ++++++++++++++++++ 1 file changed, 117 insertions(+) diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestSCMPipelineManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestSCMPipelineManager.java index 7c2f17e85840..55b5aa50d95b 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestSCMPipelineManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestSCMPipelineManager.java @@ -21,8 +21,10 @@ import java.io.File; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashSet; import java.util.List; +import java.util.NoSuchElementException; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; @@ -37,11 +39,15 @@ import org.apache.hadoop.hdds.scm.container.ContainerID; import org.apache.hadoop.hdds.scm.container.MockNodeManager; import org.apache.hadoop.hdds.scm.exceptions.SCMException; +import org.apache.hadoop.hdds.scm.metadata.PipelineIDCodec; import org.apache.hadoop.hdds.scm.metadata.SCMMetadataStore; import org.apache.hadoop.hdds.scm.metadata.SCMMetadataStoreImpl; import org.apache.hadoop.hdds.scm.safemode.SCMSafeModeManager; import org.apache.hadoop.hdds.scm.server.SCMDatanodeHeartbeatDispatcher.PipelineReportFromDatanode; import org.apache.hadoop.hdds.server.events.EventQueue; +import org.apache.hadoop.hdds.utils.db.Table; +import org.apache.hadoop.hdds.utils.db.Table.KeyValue; +import org.apache.hadoop.hdds.utils.db.TableIterator; import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.test.GenericTestUtils; @@ -56,7 +62,15 @@ import static org.junit.Assert.fail; import org.junit.Before; import org.junit.Test; +import org.mockito.InOrder; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; /** * Test cases to verify PipelineManager. @@ -539,6 +553,109 @@ public void testSafeModeUpdatedOnSafemodeExit() pipelineManager.close(); } + /** + * This test was created for HDDS-3925 to check whether the db handling is + * proper at the SCMPipelineManager level. We should remove this test + * when we remove the key swap from the SCMPipelineManager code. + * + * The test emulates internally the values that the iterator will provide + * back to the check-fix code path. The iterator internally deserialize the + * key stored in RocksDB using the PipelineIDCodec. The older version of the + * codec serialized the PipelineIDs by taking the byte[] representation of + * the protobuf representation of the PipelineID, and deserialization was not + * implemented. + * + * In order to be able to check and fix the change, the deserialization was + * introduced, and deserialisation of the old protobuf byte representation + * with the new deserialization logic of the keys are + * checked against the PipelineID serialized in the value as well via + * protobuf. + * The DB is storing the keys now based on a byte[] serialized from the UUID + * inside the PipelineID. 
+ * For this we emulate the getKey of the KeyValue returned by the + * iterator to return a PipelineID that is deserialized from the byte[] + * representation of the protobuf representation of the PipelineID in the + * test, as that would be the value we get from the iterator when iterating + * through a table with the old key format. + * + * @throws Exception when something goes wrong + */ + @Test + public void testPipelineDBKeyFormatChange() throws Exception { + Pipeline p1 = pipelineStub(); + Pipeline p2 = pipelineStub(); + Pipeline p3 = pipelineStub(); + + TableIterator> iteratorMock = + mock(TableIterator.class); + + KeyValue kv1 = + mockKeyValueToProvideOldKeyFormat(p1); + KeyValue kv2 = + mockKeyValueToProvideNormalFormat(p2); + KeyValue kv3 = + mockKeyValueToProvideOldKeyFormat(p3); + + when(iteratorMock.next()) + .thenReturn(kv1, kv2, kv3) + .thenThrow(new NoSuchElementException()); + when(iteratorMock.hasNext()) + .thenReturn(true, true, true, false); + + Table pipelineStore = mock(Table.class); + doReturn(iteratorMock).when(pipelineStore).iterator(); + when(pipelineStore.isEmpty()).thenReturn(false); + + InOrder inorderVerifier = inOrder(pipelineStore, iteratorMock); + + new SCMPipelineManager(conf, nodeManager, pipelineStore, new EventQueue()); + + inorderVerifier.verify(iteratorMock).removeFromDB(); + inorderVerifier.verify(pipelineStore).put(p1.getId(), p1); + inorderVerifier.verify(iteratorMock).removeFromDB(); + inorderVerifier.verify(pipelineStore).put(p3.getId(), p3); + + verify(pipelineStore, never()).put(p2.getId(), p2); + } + + private Pipeline pipelineStub() { + return Pipeline.newBuilder() + .setId(PipelineID.randomId()) + .setType(HddsProtos.ReplicationType.RATIS) + .setFactor(HddsProtos.ReplicationFactor.ONE) + .setState(Pipeline.PipelineState.OPEN) + .setNodes( + Arrays.asList( + nodeManager.getNodes(HddsProtos.NodeState.HEALTHY).get(0) + ) + ) + .setNodesInOrder(Arrays.asList(0)) + .build(); + } + + private KeyValue + mockKeyValueToProvideOldKeyFormat(Pipeline pipeline) + throws IOException { + KeyValue kv = mock(KeyValue.class); + when(kv.getValue()).thenReturn(pipeline); + when(kv.getKey()) + .thenReturn( + new PipelineIDCodec().fromPersistedFormat( + pipeline.getId().getProtobuf().toByteArray() + ) + ); + return kv; + } + + private KeyValue + mockKeyValueToProvideNormalFormat(Pipeline pipeline) + throws IOException { + KeyValue kv = mock(KeyValue.class); + when(kv.getValue()).thenReturn(pipeline); + when(kv.getKey()).thenReturn(pipeline.getId()); + return kv; + } + private void sendPipelineReport(DatanodeDetails dn, Pipeline pipeline, PipelineReportHandler pipelineReportHandler, boolean isLeader, EventQueue eventQueue) { From 74df1ac556eeb247a88ddc9e7282d23addb9d863 Mon Sep 17 00:00:00 2001 From: Istvan Fajth Date: Sat, 11 Jul 2020 03:27:24 +0200 Subject: [PATCH 05/16] HDDS-3925. Added missing license, and fixed checkstyle issues. 
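In short, the mismatch that the startup check relies on can be expressed as the sketch below (illustrative only, assumed to sit in a context that may throw IOException; pipeline stands for a value read from the table): the old key was the protobuf serialization of the PipelineID, and decoding those bytes with the new UUID-based codec does not reproduce the pipeline's own ID.

// Sketch: an old-format key, decoded by the new codec, fails to match
// the PipelineID carried inside the stored Pipeline value.
byte[] oldFormatKey = pipeline.getId().getProtobuf().toByteArray();
PipelineID decodedKey =
    new PipelineIDCodec().fromPersistedFormat(oldFormatKey);
boolean needsRewrite = !decodedKey.equals(pipeline.getId());  // true for old keys
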
--- .../scm/metadata/TestPipelineIDCodec.java | 25 +++++++++++++++++-- .../scm/pipeline/TestSCMPipelineManager.java | 8 +++--- 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestPipelineIDCodec.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestPipelineIDCodec.java index a7f0aeae35c9..5543be5832b1 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestPipelineIDCodec.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/metadata/TestPipelineIDCodec.java @@ -1,3 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.hadoop.hdds.scm.metadata; import static org.junit.Assert.assertArrayEquals; @@ -8,12 +26,15 @@ import java.util.UUID; +/** + * Testing serialization of PipelineID objects to/from RocksDB. + */ public class TestPipelineIDCodec { @Test public void testPersistingZeroAsUUID() throws Exception { - long leastSigBits = 0x0000_0000_0000_0000l; - long mostSigBits = 0x0000_0000_0000_0000l; + long leastSigBits = 0x0000_0000_0000_0000L; + long mostSigBits = 0x0000_0000_0000_0000L; byte[] expected = new byte[] { b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00), b(0x00) diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestSCMPipelineManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestSCMPipelineManager.java index 55b5aa50d95b..fc8f61a7dbf1 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestSCMPipelineManager.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/pipeline/TestSCMPipelineManager.java @@ -68,7 +68,6 @@ import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -634,7 +633,7 @@ private Pipeline pipelineStub() { } private KeyValue - mockKeyValueToProvideOldKeyFormat(Pipeline pipeline) + mockKeyValueToProvideOldKeyFormat(Pipeline pipeline) throws IOException { KeyValue kv = mock(KeyValue.class); when(kv.getValue()).thenReturn(pipeline); @@ -642,13 +641,12 @@ private Pipeline pipelineStub() { .thenReturn( new PipelineIDCodec().fromPersistedFormat( pipeline.getId().getProtobuf().toByteArray() - ) - ); + )); return kv; } private KeyValue - mockKeyValueToProvideNormalFormat(Pipeline pipeline) + mockKeyValueToProvideNormalFormat(Pipeline pipeline) throws IOException { KeyValue kv = mock(KeyValue.class); 
when(kv.getValue()).thenReturn(pipeline); From 78f955a5600543be826e26e94d9d3b3ff9b12715 Mon Sep 17 00:00:00 2001 From: Istvan Fajth Date: Sat, 11 Jul 2020 04:15:03 +0200 Subject: [PATCH 06/16] Add javadoc to newly added method on TableIterator interface. --- .../java/org/apache/hadoop/hdds/utils/db/TableIterator.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java index 1967e5a538b4..3780c6c291b7 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java @@ -60,6 +60,12 @@ public interface TableIterator extends Iterator, Closeable { */ T value(); + /** + * Removes the actual value of the iterator from the database table on + * which the iterator is working on. + * + * @throws IOException when there is an error occured during deletion. + */ void removeFromDB() throws IOException; } From 09c24775947b9a41c48a8d683153c98f0a1a4ef2 Mon Sep 17 00:00:00 2001 From: Istvan Fajth Date: Sat, 11 Jul 2020 04:15:36 +0200 Subject: [PATCH 07/16] Fix typo --- .../java/org/apache/hadoop/hdds/utils/db/TableIterator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java index 3780c6c291b7..c9bc045b1df1 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TableIterator.java @@ -61,7 +61,7 @@ public interface TableIterator extends Iterator, Closeable { T value(); /** - * Removes the actual value of the iterator from the database table on + * Remove the actual value of the iterator from the database table on * which the iterator is working on. * * @throws IOException when there is an error occured during deletion. From e15f7612dfa06d481db745c9d78866aa1b712a28 Mon Sep 17 00:00:00 2001 From: Istvan Fajth Date: Sat, 11 Jul 2020 12:34:53 +0200 Subject: [PATCH 08/16] Trigger a new test run. From 44b9c1c66ec618a94e37c9b272c1853e107795b4 Mon Sep 17 00:00:00 2001 From: Istvan Fajth Date: Tue, 14 Jul 2020 02:43:35 +0200 Subject: [PATCH 09/16] HDDS-3925. Review request. Add tests to verify RDBStoreIterator interactions with the underlying RocksIterator and RocksDBTable. 
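These tests pin down the interaction contract sketched below; rocksIterator and rdbTable are placeholder instances, not part of the tests themselves.

// A read-only iterator (no RDBTable) cannot delete the current entry:
RDBStoreIterator readOnlyIter = new RDBStoreIterator(rocksIterator);
// readOnlyIter.removeFromDB();  // would throw UnsupportedOperationException

// An iterator constructed with its RDBTable delegates the delete to the table:
RDBStoreIterator deletingIter = new RDBStoreIterator(rocksIterator, rdbTable);
deletingIter.removeFromDB();     // rdbTable.delete(<key at current position>)
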
--- .../hdds/utils/db/TestRDBStoreIterator.java | 201 ++++++++++++++++++ 1 file changed, 201 insertions(+) create mode 100644 hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreIterator.java diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreIterator.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreIterator.java new file mode 100644 index 000000000000..15ae056970c8 --- /dev/null +++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreIterator.java @@ -0,0 +1,201 @@ +package org.apache.hadoop.hdds.utils.db; + +import org.junit.Before; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.InOrder; +import org.rocksdb.RocksIterator; + +import java.util.NoSuchElementException; +import java.util.function.Consumer; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyCollectionOf; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class TestRDBStoreIterator { + + private RocksIterator rocksDBIteratorMock; + private RDBTable rocksTableMock; + + @Before + public void setup() { + rocksDBIteratorMock = mock(RocksIterator.class); + rocksTableMock = mock(RDBTable.class); + } + + @Test + public void testForeachRemainingCallsConsumerWithAllElements() { + when(rocksDBIteratorMock.isValid()) + .thenReturn(true, true, true, true, true, true, false); + when(rocksDBIteratorMock.key()) + .thenReturn(new byte[]{0x00}, new byte[]{0x01}, new byte[]{0x02}) + .thenThrow(new NoSuchElementException()); + when(rocksDBIteratorMock.value()) + .thenReturn(new byte[]{0x7f}, new byte[]{0x7e}, new byte[]{0x7d}) + .thenThrow(new NoSuchElementException()); + + + Consumer consumerStub = mock(Consumer.class); + + RDBStoreIterator iter = new RDBStoreIterator(rocksDBIteratorMock); + iter.forEachRemaining(consumerStub); + + ArgumentCaptor capture = + ArgumentCaptor.forClass(ByteArrayKeyValue.class); + verify(consumerStub, times(3)).accept(capture.capture()); + assertArrayEquals( + new byte[]{0x00}, capture.getAllValues().get(0).getKey()); + assertArrayEquals( + new byte[]{0x7f}, capture.getAllValues().get(0).getValue()); + assertArrayEquals( + new byte[]{0x01}, capture.getAllValues().get(1).getKey()); + assertArrayEquals( + new byte[]{0x7e}, capture.getAllValues().get(1).getValue()); + assertArrayEquals( + new byte[]{0x02}, capture.getAllValues().get(2).getKey()); + assertArrayEquals( + new byte[]{0x7d}, capture.getAllValues().get(2).getValue()); + } + + @Test + public void testHasNextDependsOnIsvalid(){ + when(rocksDBIteratorMock.isValid()).thenReturn(true, false); + + RDBStoreIterator iter = new RDBStoreIterator(rocksDBIteratorMock); + + assertTrue(iter.hasNext()); + assertFalse(iter.hasNext()); + } + + @Test + public void testNextCallsIsValidThenGetsTheValueAndStepsToNext() { + when(rocksDBIteratorMock.isValid()).thenReturn(true); + RDBStoreIterator iter = new RDBStoreIterator(rocksDBIteratorMock); + + InOrder verifier = inOrder(rocksDBIteratorMock); + + iter.next(); + + verifier.verify(rocksDBIteratorMock).isValid(); + verifier.verify(rocksDBIteratorMock).key(); + 
verifier.verify(rocksDBIteratorMock).value(); + verifier.verify(rocksDBIteratorMock).next(); + } + + @Test + public void testConstructorSeeksToFirstElement() { + new RDBStoreIterator(rocksDBIteratorMock); + + verify(rocksDBIteratorMock, times(1)).seekToFirst(); + } + + @Test + public void testSeekToFirstSeeks() { + RDBStoreIterator iter = new RDBStoreIterator(rocksDBIteratorMock); + + iter.seekToFirst(); + + verify(rocksDBIteratorMock, times(2)).seekToFirst(); + } + + @Test + public void testSeekToLastSeeks() { + RDBStoreIterator iter = new RDBStoreIterator(rocksDBIteratorMock); + + iter.seekToLast(); + + verify(rocksDBIteratorMock, times(1)).seekToLast(); + } + + @Test + public void testSeekReturnsTheActualKey() { + when(rocksDBIteratorMock.isValid()).thenReturn(true); + when(rocksDBIteratorMock.key()).thenReturn(new byte[]{0x00}); + when(rocksDBIteratorMock.value()).thenReturn(new byte[]{0x7f}); + + RDBStoreIterator iter = new RDBStoreIterator(rocksDBIteratorMock); + ByteArrayKeyValue val = iter.seek(new byte[]{0x55}); + + InOrder verifier = inOrder(rocksDBIteratorMock); + + verify(rocksDBIteratorMock, times(1)).seekToFirst(); //at construct time + verify(rocksDBIteratorMock, never()).seekToLast(); + verifier.verify(rocksDBIteratorMock, times(1)).seek(any(byte[].class)); + verifier.verify(rocksDBIteratorMock, times(1)).isValid(); + verifier.verify(rocksDBIteratorMock, times(1)).key(); + verifier.verify(rocksDBIteratorMock, times(1)).value(); + assertArrayEquals(new byte[]{0x00}, val.getKey()); + assertArrayEquals(new byte[]{0x7f}, val.getValue()); + } + + @Test + public void testGettingTheKeyIfIteratorIsValid() { + when(rocksDBIteratorMock.isValid()).thenReturn(true); + when(rocksDBIteratorMock.key()).thenReturn(new byte[]{0x00}); + + RDBStoreIterator iter = new RDBStoreIterator(rocksDBIteratorMock); + byte[] key = iter.key(); + + InOrder verifier = inOrder(rocksDBIteratorMock); + + verifier.verify(rocksDBIteratorMock, times(1)).isValid(); + verifier.verify(rocksDBIteratorMock, times(1)).key(); + assertArrayEquals(new byte[]{0x00}, key); + } + + @Test + public void testGettingTheValueIfIteratorIsValid() { + when(rocksDBIteratorMock.isValid()).thenReturn(true); + when(rocksDBIteratorMock.key()).thenReturn(new byte[]{0x00}); + when(rocksDBIteratorMock.value()).thenReturn(new byte[]{0x7f}); + + RDBStoreIterator iter = new RDBStoreIterator(rocksDBIteratorMock); + ByteArrayKeyValue val = iter.value(); + + InOrder verifier = inOrder(rocksDBIteratorMock); + + verifier.verify(rocksDBIteratorMock, times(1)).isValid(); + verifier.verify(rocksDBIteratorMock, times(1)).key(); + assertArrayEquals(new byte[]{0x00}, val.getKey()); + assertArrayEquals(new byte[]{0x7f}, val.getValue()); + } + + @Test + public void testRemovingFromDBActuallyDeletesFromTable() throws Exception { + byte[] testKey = new byte[]{0x00}; + when(rocksDBIteratorMock.isValid()).thenReturn(true); + when(rocksDBIteratorMock.key()).thenReturn(testKey); + + RDBStoreIterator iter = + new RDBStoreIterator(rocksDBIteratorMock, rocksTableMock); + iter.removeFromDB(); + + InOrder verifier = inOrder(rocksDBIteratorMock, rocksTableMock); + + verifier.verify(rocksDBIteratorMock, times(1)).isValid(); + verifier.verify(rocksTableMock, times(1)).delete(testKey); + } + + @Test(expected = UnsupportedOperationException.class) + public void testRemoveFromDBWithoutDBTableSet() throws Exception { + RDBStoreIterator iter = new RDBStoreIterator(rocksDBIteratorMock); + iter.removeFromDB(); + } + + @Test + public void testCloseCloses() throws Exception { + 
RDBStoreIterator iter = new RDBStoreIterator(rocksDBIteratorMock); + iter.close(); + + verify(rocksDBIteratorMock, times(1)).close(); + } +} From acd7ba909cc9e70f8428707a2fec8592b9806f28 Mon Sep 17 00:00:00 2001 From: Istvan Fajth Date: Tue, 14 Jul 2020 02:48:27 +0200 Subject: [PATCH 10/16] Added license. Removed unused import. --- .../hdds/utils/db/TestRDBStoreIterator.java | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreIterator.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreIterator.java index 15ae056970c8..0b7549389a00 100644 --- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreIterator.java +++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreIterator.java @@ -1,3 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ package org.apache.hadoop.hdds.utils.db; import org.junit.Before; @@ -13,7 +31,6 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyCollectionOf; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; From da6ef7282cf54c700c701b58dad7fb6b0931b166 Mon Sep 17 00:00:00 2001 From: Istvan Fajth Date: Tue, 14 Jul 2020 02:57:59 +0200 Subject: [PATCH 11/16] Fix missing javadoc. --- .../apache/hadoop/hdds/utils/db/TestRDBStoreIterator.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreIterator.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreIterator.java index 0b7549389a00..6e85977843ac 100644 --- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreIterator.java +++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreIterator.java @@ -38,6 +38,12 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +/** + * This test prescribe expected behaviour from the RDBStoreIterator which wraps + * RocksDB's own iterator. Ozone internally in TypedTableIterator uses, the + * RDBStoreIterator to provide iteration over table elements in a typed manner. + * The tests are to ensure we access RocksDB via the iterator properly. + */ public class TestRDBStoreIterator { private RocksIterator rocksDBIteratorMock; From 10c6209405c1a9edfdfba20d8777ad971613f62e Mon Sep 17 00:00:00 2001 From: Istvan Fajth Date: Tue, 14 Jul 2020 09:05:35 +0200 Subject: [PATCH 12/16] Trigger a new test run. 
From b4cba4ef381a35eee2b88465f0321dd1c35b9d2d Mon Sep 17 00:00:00 2001
From: GitHub actions
Date: Tue, 14 Jul 2020 08:53:40 +0000
Subject: [PATCH 13/16] empty commit to retest build

From 8344923512918ee987c937f369e31a0f23522f72 Mon Sep 17 00:00:00 2001
From: Istvan Fajth
Date: Tue, 14 Jul 2020 11:15:00 +0200
Subject: [PATCH 14/16] Trigger a new test run.

From 94ade5bc13809122f6efac8414d120bc97027d6d Mon Sep 17 00:00:00 2001
From: GitHub actions
Date: Tue, 14 Jul 2020 17:04:49 +0000
Subject: [PATCH 15/16] empty commit to retest build

From 4bce3d4935238b0833ece51a465a2d1e84db0742 Mon Sep 17 00:00:00 2001
From: Istvan Fajth
Date: Tue, 14 Jul 2020 19:34:04 +0200
Subject: [PATCH 16/16] Trigger a new test run.