Skip to content
Open
Original file line number Diff line number Diff line change
Expand Up @@ -459,6 +459,9 @@ public class CommonParameter {
@Getter
@Setter
public int jsonRpcMaxBlockFilterNum = 50000;
@Getter
@Setter
public int jsonRpcMaxLogFilterNum = 20000;

@Getter
@Setter
Expand Down
5 changes: 5 additions & 0 deletions framework/src/main/java/org/tron/core/config/args/Args.java
Original file line number Diff line number Diff line change
Expand Up @@ -265,6 +265,11 @@ public static void applyConfigParams(
config.getInt(ConfigKey.NODE_JSONRPC_MAX_BLOCK_FILTER_NUM);
}

if (config.hasPath(ConfigKey.NODE_JSONRPC_MAX_LOG_FILTER_NUM)) {
PARAMETER.jsonRpcMaxLogFilterNum =
config.getInt(ConfigKey.NODE_JSONRPC_MAX_LOG_FILTER_NUM);
}
Comment on lines +268 to +271
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

🧩 Analysis chain

🏁 Script executed:

#!/bin/bash
# Description: Inspect all uses of the new JSON-RPC log filter cap and related tests.
# Expectation: Enforcement assumes a strictly positive cap, and tests cover zero/negative config values if allowed.

rg -n -C4 '\bjsonRpcMaxLogFilterNum\b|getJsonRpcMaxLogFilterNum|NODE_JSONRPC_MAX_LOG_FILTER_NUM|eth_newFilter|newFilter\s*\(' --glob '*.java'

Repository: 317787106/java-tron

Length of output: 17055


🏁 Script executed:

#!/bin/bash
# Find where maxLogFilterNum is compared/used in enforcement logic
rg -n -B2 -A2 'maxLogFilterNum\s*[<>]=?\s*|<\s*maxLogFilterNum|>\s*maxLogFilterNum' --glob '*.java'

Repository: 317787106/java-tron

Length of output: 45


🏁 Script executed:

#!/bin/bash
# Broader search for all usages of maxLogFilterNum variable
rg -n 'maxLogFilterNum' --glob '*.java' -A3 -B1

Repository: 317787106/java-tron

Length of output: 2332


🏁 Script executed:

#!/bin/bash
# Verify the exact context around lines 268-271 in Args.java
sed -n '260,280p' framework/src/main/java/org/tron/core/config/args/Args.java

Repository: 317787106/java-tron

Length of output: 789


Add validation for maxLogFilterNum to reject non-positive values.

Line 270 accepts zero or negative values. Because the enforcement at TronJsonRpcImpl.java:1443 (if (eventFilter2Result.size() >= maxLogFilterNum)) rejects a new filter whenever the map size reaches the cap, a cap of zero or less makes that condition unconditionally true, so every eth_newFilter call fails. Validate that the parameter is strictly positive when loading the config so the node fails fast on a misconfiguration.

Proposed validation
     if (config.hasPath(ConfigKey.NODE_JSONRPC_MAX_LOG_FILTER_NUM)) {
-      PARAMETER.jsonRpcMaxLogFilterNum =
-          config.getInt(ConfigKey.NODE_JSONRPC_MAX_LOG_FILTER_NUM);
+      int maxLogFilterNum = config.getInt(ConfigKey.NODE_JSONRPC_MAX_LOG_FILTER_NUM);
+      if (maxLogFilterNum <= 0) {
+        throw new IllegalArgumentException(
+            ConfigKey.NODE_JSONRPC_MAX_LOG_FILTER_NUM + " must be greater than 0");
+      }
+      PARAMETER.jsonRpcMaxLogFilterNum = maxLogFilterNum;
     }
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@framework/src/main/java/org/tron/core/config/args/Args.java` around lines 268
- 271, When reading ConfigKey.NODE_JSONRPC_MAX_LOG_FILTER_NUM in Args (setting
PARAMETER.jsonRpcMaxLogFilterNum), validate the value is a positive integer (>0)
and reject non-positive values; if invalid, log an error and fail fast (throw an
IllegalArgumentException or exit) so the node won't start with zero/negative
maxLogFilterNum which breaks TronJsonRpcImpl's eth_newFilter logic (see
TronJsonRpcImpl.java:1443). Locate the assignment in Args and add the check
immediately after config.getInt(...), referencing
PARAMETER.jsonRpcMaxLogFilterNum for the value and ensuring a clear error
message that includes the invalid value.


if (config.hasPath(ConfigKey.VM_MIN_TIME_RATIO)) {
PARAMETER.minTimeRatio = config.getDouble(ConfigKey.VM_MIN_TIME_RATIO);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -150,6 +150,8 @@ private ConfigKey() {
public static final String NODE_JSONRPC_MAX_SUB_TOPICS = "node.jsonrpc.maxSubTopics";
public static final String NODE_JSONRPC_MAX_BLOCK_FILTER_NUM =
"node.jsonrpc.maxBlockFilterNum";
public static final String NODE_JSONRPC_MAX_LOG_FILTER_NUM =
"node.jsonrpc.maxLogFilterNum";

// node - dns
public static final String NODE_DNS_TREE_URLS = "node.dns.treeUrls";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -291,9 +291,10 @@ CompilationResult ethSubmitHashrate(String hashrate, String id)
@JsonRpcErrors({
@JsonRpcError(exception = JsonRpcMethodNotFoundException.class, code = -32601, data = "{}"),
@JsonRpcError(exception = JsonRpcInvalidParamsException.class, code = -32602, data = "{}"),
@JsonRpcError(exception = JsonRpcExceedLimitException.class, code = -32005, data = "{}"),
})
String newFilter(FilterRequest fr) throws JsonRpcInvalidParamsException,
JsonRpcMethodNotFoundException;
JsonRpcMethodNotFoundException, JsonRpcExceedLimitException;

@JsonRpcMethod("eth_newBlockFilter")
@JsonRpcErrors({
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import lombok.Getter;
Expand Down Expand Up @@ -115,6 +116,7 @@ public enum RequestSource {
private static final String FILTER_NOT_FOUND = "filter not found";
public static final int EXPIRE_SECONDS = 5 * 60;
private static final int maxBlockFilterNum = Args.getInstance().getJsonRpcMaxBlockFilterNum();
private static final int maxLogFilterNum = Args.getInstance().getJsonRpcMaxLogFilterNum();
private static final Cache<LogFilterElement, LogFilterElement> logElementCache =
CacheBuilder.newBuilder()
.maximumSize(300_000L) // 300s * tps(1000) * 1 log/tx ≈ 300_000
Expand Down Expand Up @@ -169,6 +171,8 @@ public enum RequestSource {
private static final String NO_BLOCK_HEADER_BY_HASH = "header for hash not found";

private static final String ERROR_SELECTOR = "08c379a0"; // Function selector for Error(string)
private static final int FILTER_PARALLEL_THRESHOLD = 10000;
private static final ForkJoinPool LOGS_FILTER_POOL = new ForkJoinPool(2);
/**
* thread pool of query section bloom store
*/
Expand Down Expand Up @@ -222,53 +226,68 @@ public static void handleBLockFilter(BlockFilterCapsule blockFilterCapsule) {
* append LogsFilterCapsule's LogFilterElement list to each filter if matched
*/
public static void handleLogsFilter(LogsFilterCapsule logsFilterCapsule) {
Iterator<Entry<String, LogFilterAndResult>> it;
long t1 = System.currentTimeMillis();
Map<String, LogFilterAndResult> eventFilterMap;

if (logsFilterCapsule.isSolidified()) {
it = getEventFilter2ResultSolidity().entrySet().iterator();
eventFilterMap = getEventFilter2ResultSolidity();
} else {
it = getEventFilter2ResultFull().entrySet().iterator();
eventFilterMap = getEventFilter2ResultFull();
}

while (it.hasNext()) {
Entry<String, LogFilterAndResult> entry = it.next();
if (entry.getValue().isExpire()) {
it.remove();
continue;
}
if (eventFilterMap.size() <= FILTER_PARALLEL_THRESHOLD) {
eventFilterMap.entrySet().forEach(
entry -> processLogFilterEntry(entry, eventFilterMap, logsFilterCapsule));
} else {
LOGS_FILTER_POOL.submit(() -> eventFilterMap.entrySet().parallelStream()
.forEach(entry -> processLogFilterEntry(entry, eventFilterMap, logsFilterCapsule))
).join();
}
long t2 = System.currentTimeMillis();
logger.debug("handleLogsFilter {} cost {}, filter size {}",
Comment thread
317787106 marked this conversation as resolved.
logsFilterCapsule.isSolidified() ? "Solidity" : "Full", t2 - t1, eventFilterMap.size());
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.

private static void processLogFilterEntry(
Map.Entry<String, LogFilterAndResult> entry,
Map<String, LogFilterAndResult> eventFilterMap,
LogsFilterCapsule logsFilterCapsule) {
LogFilterAndResult logFilterAndResult = entry.getValue();
if (logFilterAndResult.isExpire()) {
eventFilterMap.remove(entry.getKey());
return;
}

LogFilterAndResult logFilterAndResult = entry.getValue();
long fromBlock = logFilterAndResult.getLogFilterWrapper().getFromBlock();
long toBlock = logFilterAndResult.getLogFilterWrapper().getToBlock();
if (!(fromBlock <= logsFilterCapsule.getBlockNumber()
&& logsFilterCapsule.getBlockNumber() <= toBlock)) {
continue;
}
long blockNumber = logsFilterCapsule.getBlockNumber();
long fromBlock = logFilterAndResult.getLogFilterWrapper().getFromBlock();
long toBlock = logFilterAndResult.getLogFilterWrapper().getToBlock();
if (!(fromBlock <= blockNumber && blockNumber <= toBlock)) {
return;
}

if (logsFilterCapsule.getBloom() != null
&& !logFilterAndResult.getLogFilterWrapper().getLogFilter()
.matchBloom(logsFilterCapsule.getBloom())) {
continue;
}
if (logsFilterCapsule.getBloom() != null && !logFilterAndResult.getLogFilterWrapper()
.getLogFilter().matchBloom(logsFilterCapsule.getBloom())) {
return;
}

LogFilter logFilter = logFilterAndResult.getLogFilterWrapper().getLogFilter();
List<LogFilterElement> elements =
LogMatch.matchBlock(logFilter, logsFilterCapsule.getBlockNumber(),
logsFilterCapsule.getBlockHash(), logsFilterCapsule.getTxInfoList(),
logsFilterCapsule.isRemoved());
LogFilter logFilter = logFilterAndResult.getLogFilterWrapper().getLogFilter();
List<LogFilterElement> elements =
LogMatch.matchBlock(logFilter, blockNumber, logsFilterCapsule.getBlockHash(),
logsFilterCapsule.getTxInfoList(), logsFilterCapsule.isRemoved());

for (LogFilterElement element : elements) {
LogFilterElement cachedElement;
try {
// compare with hashcode() first, then with equals(). If not exist, put it.
cachedElement = logElementCache.get(element, () -> element);
} catch (ExecutionException e) {
logger.error("Getting/loading LogFilterElement from cache fails", e); // never happen
cachedElement = element;
}
logFilterAndResult.getResult().add(cachedElement);
List<LogFilterElement> localResults = new ArrayList<>(elements.size());
for (LogFilterElement element : elements) {
LogFilterElement cachedElement;
try {
// compare with hashcode() first, then with equals(). If not exist, put it.
cachedElement = logElementCache.get(element, () -> element);
} catch (ExecutionException e) {
logger.error("Getting/loading LogFilterElement from cache fails", e); // never happen
cachedElement = element;
}
localResults.add(cachedElement);
}
logFilterAndResult.getResult().addAll(localResults);
}

@Override
Expand Down Expand Up @@ -1406,7 +1425,7 @@ public CompilationResult ethSubmitHashrate(String hashrate, String id)

@Override
public String newFilter(FilterRequest fr) throws JsonRpcInvalidParamsException,
JsonRpcMethodNotFoundException {
JsonRpcMethodNotFoundException, JsonRpcExceedLimitException {
disableInPBFT("eth_newFilter");

// not supports finalized as block parameter
Expand All @@ -1421,7 +1440,10 @@ public String newFilter(FilterRequest fr) throws JsonRpcInvalidParamsException,
} else {
eventFilter2Result = eventFilter2ResultSolidity;
}

if (eventFilter2Result.size() >= maxLogFilterNum) {
throw new JsonRpcExceedLimitException(
"exceed max log filters: " + maxLogFilterNum + ", try again later");
}
Comment on lines +1443 to +1446
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

TOCTOU race: size check is not atomic with insert.

The size() check on line 1443 and the put() on line 1450 are not performed atomically on the ConcurrentHashMap. Under concurrent eth_newFilter requests, multiple callers can observe size() < maxLogFilterNum simultaneously and all proceed to insert, exceeding the configured cap. Given that the default cap is 20,000, this is a soft-limit abuse concern rather than a correctness bug, but it defeats the purpose of the cap under burst load.

Consider using compute/computeIfAbsent pattern, or re-check size after put and rollback if exceeded:

🛡️ Example hardening
-    if (eventFilter2Result.size() >= maxLogFilterNum) {
-      throw new JsonRpcExceedLimitException(
-          "exceed max log filters: " + maxLogFilterNum + ", try again later");
-    }
     long currentMaxFullNum = wallet.getNowBlock().getBlockHeader().getRawData().getNumber();
     LogFilterAndResult logFilterAndResult = new LogFilterAndResult(fr, currentMaxFullNum, wallet);
     String filterID = generateFilterId();
-    eventFilter2Result.put(filterID, logFilterAndResult);
+    if (eventFilter2Result.size() >= maxLogFilterNum) {
+      throw new JsonRpcExceedLimitException(
+          "exceed max log filters: " + maxLogFilterNum + ", try again later");
+    }
+    eventFilter2Result.put(filterID, logFilterAndResult);
+    if (eventFilter2Result.size() > maxLogFilterNum) {
+      eventFilter2Result.remove(filterID);
+      throw new JsonRpcExceedLimitException(
+          "exceed max log filters: " + maxLogFilterNum + ", try again later");
+    }
     return ByteArray.toJsonHex(filterID);

The same concern applies to newBlockFilter (lines 1465-1468), which is preexisting and out of scope but worth noting for a follow-up.

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@framework/src/main/java/org/tron/core/services/jsonrpc/TronJsonRpcImpl.java`
around lines 1443 - 1446, The size() check on eventFilter2Result and the
subsequent put() can race under concurrent eth_newFilter calls; change the
insertion to an atomic compute/computeIfAbsent or use eventFilter2Result.compute
to both test and insert in one atomic operation (referencing eventFilter2Result,
maxLogFilterNum, and the eth_newFilter insertion logic) so that you only
create/accept a new filter if the total stays <= maxLogFilterNum; alternatively,
perform the put and then immediately re-check the map size and remove/rollback
the newly inserted entry if the cap is exceeded to enforce the limit atomically.

long currentMaxFullNum = wallet.getNowBlock().getBlockHeader().getRawData().getNumber();
LogFilterAndResult logFilterAndResult = new LogFilterAndResult(fr, currentMaxFullNum, wallet);
String filterID = generateFilterId();
Expand Down Expand Up @@ -1573,6 +1595,7 @@ public static Object[] getFilterResult(String filterId, Map<String, BlockFilterA

@Override
public void close() throws IOException {
ExecutorServiceManager.shutdownAndAwaitTermination(LOGS_FILTER_POOL, "logs-filter-pool");
logElementCache.invalidateAll();
blockHashCache.invalidateAll();
ExecutorServiceManager.shutdownAndAwaitTermination(sectionExecutor, esName);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -93,14 +93,14 @@ public LogFilterElement[] matchBlockOneByOne()
String blockHash = manager.getChainBaseManager().getBlockIdByNum(blockNum).toString();
List<LogFilterElement> matchedLog = matchBlock(logFilterWrapper.getLogFilter(), blockNum,
blockHash, transactionInfoList, false);

if (!matchedLog.isEmpty()) {
if (logFilterElementList.size() + matchedLog.size() > LogBlockQuery.MAX_RESULT) {
throw new JsonRpcTooManyResultException(
"query returned more than " + LogBlockQuery.MAX_RESULT + " results");
}
logFilterElementList.addAll(matchedLog);
}

if (logFilterElementList.size() > LogBlockQuery.MAX_RESULT) {
throw new JsonRpcTooManyResultException(
"query returned more than " + LogBlockQuery.MAX_RESULT + " results");
}
}

return logFilterElementList.toArray(new LogFilterElement[0]);
Expand Down
2 changes: 2 additions & 0 deletions framework/src/main/resources/config.conf
Original file line number Diff line number Diff line change
Expand Up @@ -375,6 +375,8 @@ node {
maxSubTopics = 1000
# Allowed maximum number for blockFilter
maxBlockFilterNum = 50000
# Allowed maximum number of log filters created via eth_newFilter
maxLogFilterNum = 20000
}

# Disabled api list, it will work for http, rpc and pbft, both FullNode and SolidityNode,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,7 @@ public void testDisableInSolidity() {
TronJsonRpcImpl tronJsonRpc = new TronJsonRpcImpl(nodeInfoService, wallet, dbManager);
try {
tronJsonRpc.buildTransaction(buildArguments);
tronJsonRpc.close();
} catch (Exception e) {
Assert.assertEquals("the method buildTransaction does not exist/is not available in "
+ "SOLIDITY", e.getMessage());
Expand Down Expand Up @@ -136,6 +137,7 @@ public void testEnableInFullNode() {

try {
tronJsonRpc.buildTransaction(buildArguments);
tronJsonRpc.close();
} catch (Exception e) {
Assert.fail();
}
Expand Down
1 change: 1 addition & 0 deletions framework/src/test/resources/config-localtest.conf
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,7 @@ node {
# maxBlockRange = 5000
# maxSubTopics = 1000
# maxBlockFilterNum = 30000
# maxLogFilterNum = 20000
}

}
Expand Down
1 change: 1 addition & 0 deletions framework/src/test/resources/config-test-mainnet.conf
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,7 @@ node {
# maxBlockRange = 5000
# maxSubTopics = 1000
# maxBlockFilterNum = 50000
# maxLogFilterNum = 20000
}

rpc {
Expand Down
1 change: 1 addition & 0 deletions framework/src/test/resources/config-test.conf
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,7 @@ node {
# maxBlockRange = 5000
# maxSubTopics = 1000
# maxBlockFilterNum = 30000
# maxLogFilterNum = 20000
}

# use your ipv6 address for node discovery and tcp connection, default false
Expand Down
Loading