Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 14 additions & 11 deletions dev-support/ci/selective_ci_checks.bats
Original file line number Diff line number Diff line change
Expand Up @@ -177,17 +177,20 @@ load bats-assert/load.bash
assert_output -p needs-kubernetes-tests=false
}

@test "native test in other module" {
run dev-support/ci/selective_ci_checks.sh 7d01cc14a6

assert_output -p 'basic-checks=["rat","author","checkstyle","findbugs","native","unit"]'
assert_output -p needs-build=true
assert_output -p needs-compile=true
assert_output -p needs-compose-tests=false
assert_output -p needs-dependency-check=false
assert_output -p needs-integration-tests=false
assert_output -p needs-kubernetes-tests=false
}
# disabled, because this test fails if
# hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdb/util/TestManagedSstFileReader.java
# is not present in the current tree (i.e. if file is renamed, moved or deleted)
#@test "native test in other module" {
# run dev-support/ci/selective_ci_checks.sh 7d01cc14a6
#
# assert_output -p 'basic-checks=["rat","author","checkstyle","findbugs","native","unit"]'
# assert_output -p needs-build=true
# assert_output -p needs-compile=true
# assert_output -p needs-compose-tests=false
# assert_output -p needs-dependency-check=false
# assert_output -p needs-integration-tests=false
# assert_output -p needs-kubernetes-tests=false
#}

@test "kubernetes only" {
run dev-support/ci/selective_ci_checks.sh 5336bb9bd
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,16 +46,16 @@
import static java.nio.charset.StandardCharsets.UTF_8;

/**
* ManagedSstFileReader provides an abstraction layer using which we can
* iterate over multiple underlying SST files transparently.
* Provides an abstraction layer using which we can iterate over multiple
* underlying SST files transparently.
*/
public class ManagedSstFileReader {
public class SstFileSetReader {

private final Collection<String> sstFiles;

private volatile long estimatedTotalKeys = -1;

public ManagedSstFileReader(final Collection<String> sstFiles) {
public SstFileSetReader(final Collection<String> sstFiles) {
this.sstFiles = sstFiles;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@
/**
* ManagedSstFileReader tests.
*/
class TestManagedSstFileReader {
class TestSstFileSetReader {

@TempDir
private File tempDir;
Expand Down Expand Up @@ -146,7 +146,7 @@ public void testGetKeyStream(int numberOfFiles)
.collect(Collectors.toMap(Map.Entry::getKey,
Map.Entry::getValue));
try (Stream<String> keyStream =
new ManagedSstFileReader(files).getKeyStream(
new SstFileSetReader(files).getKeyStream(
lowerBound.orElse(null), upperBound.orElse(null))) {
keyStream.forEach(key -> {
Assertions.assertEquals(1, keysInBoundary.get(key));
Expand Down Expand Up @@ -194,7 +194,7 @@ public void testGetKeyStreamWithTombstone(int numberOfFiles)
.orElse(true))
.collect(Collectors.toMap(Map.Entry::getKey,
Map.Entry::getValue));
try (Stream<String> keyStream = new ManagedSstFileReader(files)
try (Stream<String> keyStream = new SstFileSetReader(files)
.getKeyStreamWithTombstone(sstDumpTool, lowerBound.orElse(null),
upperBound.orElse(null))) {
keyStream.forEach(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@
import org.apache.hadoop.ozone.snapshot.SnapshotDiffResponse.JobStatus;
import org.apache.hadoop.util.ClosableIterator;
import org.apache.logging.log4j.util.Strings;
import org.apache.ozone.rocksdb.util.ManagedSstFileReader;
import org.apache.ozone.rocksdb.util.SstFileSetReader;
import org.apache.ozone.rocksdb.util.RdbUtil;
import org.apache.ozone.rocksdiff.DifferSnapshotInfo;
import org.apache.ozone.rocksdiff.RocksDBCheckpointDiffer;
Expand Down Expand Up @@ -1083,7 +1083,7 @@ void addToObjectIdMap(Table<String, ? extends WithParentObjectId> fsTable,
String tablePrefix = getTablePrefix(tablePrefixes, fsTable.getName());
boolean isDirectoryTable =
fsTable.getName().equals(DIRECTORY_TABLE);
ManagedSstFileReader sstFileReader = new ManagedSstFileReader(deltaFiles);
SstFileSetReader sstFileReader = new SstFileSetReader(deltaFiles);
validateEstimatedKeyChangesAreInLimits(sstFileReader);
String sstFileReaderLowerBound = tablePrefix;
String sstFileReaderUpperBound = null;
Expand Down Expand Up @@ -1204,7 +1204,7 @@ Set<String> getDeltaFiles(OmSnapshot fromSnapshot,
}

private void validateEstimatedKeyChangesAreInLimits(
ManagedSstFileReader sstFileReader
SstFileSetReader sstFileReader
) throws RocksDBException, IOException {
if (sstFileReader.getEstimatedTotalKeys() >
maxAllowedKeyChangesForASnapDiff) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ClosableIterator;
import org.apache.hadoop.util.ExitUtil;
import org.apache.ozone.rocksdb.util.ManagedSstFileReader;
import org.apache.ozone.rocksdb.util.SstFileSetReader;
import org.apache.ozone.rocksdb.util.RdbUtil;
import org.apache.ozone.rocksdiff.DifferSnapshotInfo;
import org.apache.ozone.rocksdiff.RocksDBCheckpointDiffer;
Expand Down Expand Up @@ -651,8 +651,8 @@ public void testObjectIdMapWithTombstoneEntries(boolean nativeLibraryLoaded,
.map(i -> (i + 100) + "/key" + i).collect(Collectors.toSet());

// Mocking SSTFileReader functions to return the above keys list.
try (MockedConstruction<ManagedSstFileReader> mockedSSTFileReader =
Mockito.mockConstruction(ManagedSstFileReader.class,
try (MockedConstruction<SstFileSetReader> mockedSSTFileReader =
Mockito.mockConstruction(SstFileSetReader.class,
(mock, context) -> {
when(mock.getKeyStreamWithTombstone(any(), any(), any()))
.thenReturn(keysIncludingTombstones.stream());
Expand Down