Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,17 @@ public T copyObject(T message) {
}
}

/**
 * Creates a {@code DelegatedCodec} that supports only the forward
 * (DELEGATE -&gt; T) conversion. The backward conversion is wired to
 * {@link #unsupportedBackward()}, so any attempt to convert a {@code T}
 * back to the delegate type throws {@link UnsupportedOperationException}.
 *
 * @param delegate the underlying codec for the DELEGATE representation
 * @param forward conversion from the delegate type to {@code T}
 * @param clazz the {@link Class} object for {@code T}
 * @param <T> the target type exposed by the returned codec
 * @param <DELEGATE> the type handled by the underlying codec
 * @return a decode-only codec
 */
// NOTE(review): CopyType.DEEP is passed unconditionally — confirm deep copy
// is the intended copy semantics for decode-only codecs.
public static <T, DELEGATE> DelegatedCodec<T, DELEGATE> decodeOnly(
    Codec<DELEGATE> delegate, CheckedFunction<DELEGATE, T, CodecException> forward, Class<T> clazz) {
  return new DelegatedCodec<>(delegate, forward, unsupportedBackward(), clazz, CopyType.DEEP);
}

/**
 * Returns a conversion function that unconditionally rejects backward
 * conversion by throwing {@link UnsupportedOperationException}.
 */
private static <X, Y> CheckedFunction<X, Y, CodecException> unsupportedBackward() {
  return ignored -> {
    throw new UnsupportedOperationException("Unsupported backward conversion");
  };
}

@Override
public String toString() {
return name;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1172,6 +1172,25 @@ message KeyInfo {
optional uint64 expectedDataGeneration = 22;
}

// KeyInfoProtoLight is a lightweight subset of KeyInfo message containing
// selected fields only, while maintaining the same field indices as KeyInfo
// for compatibility and consistency.
message KeyInfoProtoLight {
  // Field numbers deliberately mirror the corresponding fields of KeyInfo
  // (see the comment above this message) so the shared fields stay
  // wire-compatible between the two messages.
  // Numbers 7, 10-13, 18, and 20+ are intentionally skipped — they belong to
  // KeyInfo fields excluded from this lightweight view and must not be
  // reused here with different meanings.
  required string volumeName = 1;
  required string bucketName = 2;
  required string keyName = 3;
  required uint64 dataSize = 4;
  required hadoop.hdds.ReplicationType type = 5;
  optional hadoop.hdds.ReplicationFactor factor = 6;
  required uint64 creationTime = 8;
  required uint64 modificationTime = 9;
  optional uint64 objectID = 14;
  optional uint64 updateID = 15;
  optional uint64 parentID = 16;
  optional hadoop.hdds.ECReplicationConfig ecReplicationConfig = 17;
  optional bool isFile = 19;
}

message BasicKeyInfo {
optional string keyName = 1;
optional uint64 dataSize = 2;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,11 +69,11 @@
import org.apache.hadoop.ozone.recon.ReconUtils;
import org.apache.hadoop.ozone.recon.api.handlers.BucketHandler;
import org.apache.hadoop.ozone.recon.api.types.KeyEntityInfo;
import org.apache.hadoop.ozone.recon.api.types.KeyEntityInfoProtoWrapper;
import org.apache.hadoop.ozone.recon.api.types.KeyInsightInfoResponse;
import org.apache.hadoop.ozone.recon.api.types.ListKeysResponse;
import org.apache.hadoop.ozone.recon.api.types.NSSummary;
import org.apache.hadoop.ozone.recon.api.types.ParamInfo;
import org.apache.hadoop.ozone.recon.api.types.ReconBasicOmKeyInfo;
import org.apache.hadoop.ozone.recon.api.types.ResponseStatus;
import org.apache.hadoop.ozone.recon.recovery.ReconOMMetadataManager;
import org.apache.hadoop.ozone.recon.spi.impl.ReconNamespaceSummaryManagerImpl;
Expand Down Expand Up @@ -1014,7 +1014,7 @@ public Response listKeys(@QueryParam("replicationType") String replicationType,
listKeysResponse = (ListKeysResponse) response.getEntity();
}

List<KeyEntityInfoProtoWrapper> keyInfoList = listKeysResponse.getKeys();
List<ReconBasicOmKeyInfo> keyInfoList = listKeysResponse.getKeys();
if (!keyInfoList.isEmpty()) {
listKeysResponse.setLastKey(keyInfoList.get(keyInfoList.size() - 1).getKey());
}
Expand All @@ -1029,9 +1029,9 @@ private Response getListKeysResponse(ParamInfo paramInfo) {
long replicatedTotal = 0;
long unreplicatedTotal = 0;

// Search keys from non-FSO layout.
Table<String, KeyEntityInfoProtoWrapper> keyTable =
omMetadataManager.getKeyTableLite(BucketLayout.LEGACY);
Table<String, ReconBasicOmKeyInfo> keyTable =
omMetadataManager.getKeyTableBasic(BucketLayout.LEGACY);

retrieveKeysFromTable(keyTable, paramInfo, listKeysResponse.getKeys());

// Search keys from FSO layout.
Expand All @@ -1042,7 +1042,7 @@ private Response getListKeysResponse(ParamInfo paramInfo) {
return ReconResponseUtils.noMatchedKeysResponse(paramInfo.getStartPrefix());
}

for (KeyEntityInfoProtoWrapper keyEntityInfo : listKeysResponse.getKeys()) {
for (ReconBasicOmKeyInfo keyEntityInfo : listKeysResponse.getKeys()) {
replicatedTotal += keyEntityInfo.getReplicatedSize();
unreplicatedTotal += keyEntityInfo.getSize();
}
Expand All @@ -1067,13 +1067,14 @@ private Response getListKeysResponse(ParamInfo paramInfo) {
}
}

public void searchKeysInFSO(ParamInfo paramInfo, List<KeyEntityInfoProtoWrapper> results)
public void searchKeysInFSO(ParamInfo paramInfo, List<ReconBasicOmKeyInfo> results)
throws IOException {
// Convert the search prefix to an object path for FSO buckets
String startPrefixObjectPath = convertStartPrefixPathToObjectIdPath(paramInfo.getStartPrefix());
String[] names = parseRequestPath(startPrefixObjectPath);
Table<String, KeyEntityInfoProtoWrapper> fileTable =
omMetadataManager.getKeyTableLite(BucketLayout.FILE_SYSTEM_OPTIMIZED);

Table<String, ReconBasicOmKeyInfo> fileTable =
omMetadataManager.getKeyTableBasic(BucketLayout.FILE_SYSTEM_OPTIMIZED);

// If names.length > 2, then the search prefix is at the level above bucket level hence
// no need to find parent or extract id's or find subpaths as the fileTable is
Expand Down Expand Up @@ -1181,16 +1182,16 @@ public String convertStartPrefixPathToObjectIdPath(String startPrefixPath)
* @throws IOException If there are problems accessing the table.
*/
private void retrieveKeysFromTable(
Table<String, KeyEntityInfoProtoWrapper> table, ParamInfo paramInfo, List<KeyEntityInfoProtoWrapper> results)
Table<String, ReconBasicOmKeyInfo> table, ParamInfo paramInfo, List<ReconBasicOmKeyInfo> results)
throws IOException {
boolean skipPrevKey = false;
String seekKey = paramInfo.getPrevKey();
try (
TableIterator<String, ? extends Table.KeyValue<String, KeyEntityInfoProtoWrapper>> keyIter = table.iterator()) {
TableIterator<String, ? extends Table.KeyValue<String, ReconBasicOmKeyInfo>> keyIter = table.iterator()) {

if (!paramInfo.isSkipPrevKeyDone() && isNotBlank(seekKey)) {
skipPrevKey = true;
Table.KeyValue<String, KeyEntityInfoProtoWrapper> seekKeyValue =
Table.KeyValue<String, ReconBasicOmKeyInfo> seekKeyValue =
keyIter.seek(seekKey);

// check if RocksDB was able to seek correctly to the given key prefix
Expand All @@ -1207,7 +1208,7 @@ private void retrieveKeysFromTable(
StringBuilder keyPrefix = null;
int keyPrefixLength = 0;
while (keyIter.hasNext()) {
Table.KeyValue<String, KeyEntityInfoProtoWrapper> entry = keyIter.next();
Table.KeyValue<String, ReconBasicOmKeyInfo> entry = keyIter.next();
String dbKey = entry.getKey();
if (!dbKey.startsWith(paramInfo.getStartPrefix())) {
break; // Exit the loop if the key no longer matches the prefix
Expand All @@ -1217,7 +1218,7 @@ private void retrieveKeysFromTable(
continue;
}
if (applyFilters(entry, paramInfo)) {
KeyEntityInfoProtoWrapper keyEntityInfo = entry.getValue();
ReconBasicOmKeyInfo keyEntityInfo = entry.getValue();
keyEntityInfo.setKey(dbKey);
if (keyEntityInfo.getParentId() == 0) {
// Legacy bucket keys have a parentID of zero. OBS bucket keys have a parentID of the bucketID.
Expand Down Expand Up @@ -1258,7 +1259,7 @@ private void retrieveKeysFromTable(
}
}

private boolean applyFilters(Table.KeyValue<String, KeyEntityInfoProtoWrapper> entry, ParamInfo paramInfo)
private boolean applyFilters(Table.KeyValue<String, ReconBasicOmKeyInfo> entry, ParamInfo paramInfo)
throws IOException {

LOG.debug("Applying filters on : {}", entry.getKey());
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ public class ListKeysResponse {

/** list of keys. */
@JsonProperty("keys")
private List<KeyEntityInfoProtoWrapper> keys;
private List<ReconBasicOmKeyInfo> keys;

public ListKeysResponse() {
this.status = ResponseStatus.OK;
Expand Down Expand Up @@ -92,11 +92,11 @@ public void setPath(String path) {
this.path = path;
}

public List<KeyEntityInfoProtoWrapper> getKeys() {
/**
 * Returns the list of keys carried by this response.
 * Serialized under the JSON property {@code "keys"}.
 *
 * @return the key list; may be empty if no keys matched
 */
public List<ReconBasicOmKeyInfo> getKeys() {
  return keys;
}

public void setKeys(List<KeyEntityInfoProtoWrapper> keys) {
/**
 * Sets the list of keys carried by this response.
 * NOTE(review): the list is stored by reference, not defensively copied —
 * callers should not mutate it afterwards.
 *
 * @param keys the key list to expose under the {@code "keys"} JSON property
 */
public void setKeys(List<ReconBasicOmKeyInfo> keys) {
  this.keys = keys;
}

Expand Down
Loading