Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
71 changes: 71 additions & 0 deletions AGENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,77 @@ mvn exec:java -pl json-compatibility-suite -Dexec.args="--json"
./mvn-test-no-boilerplate.sh -Dtest=JsonParserTests -Djava.util.logging.ConsoleHandler.level=FINER
```

## Logging Strategy

This project uses `java.util.logging` (JUL) following Oracle's official guidelines with practical compatibility enhancements for cloud monitoring and SLF4J/Log4j bridge users.

### Key Principles

#### 1. Lambda-Based Logging Only
**ALL logging must use lambda expressions (message suppliers)** so that message construction is skipped entirely when the log level is disabled:
```java
// ✅ Correct - the supplier is invoked only when FINE is enabled
LOGGER.fine(() -> "Debug info: " + expensiveOperation());

// ❌ Wrong - string concatenation always executes
LOGGER.fine("Debug info: " + expensiveOperation());
```

#### 2. ERROR Prefix for Cloud Monitoring
SEVERE level messages that warrant cloud log monitoring include `"ERROR:"` prefix for SLF4J/Log4j bridge compatibility:
```java
// Security vulnerabilities, parse failures preventing normal execution
LOGGER.severe(() -> "ERROR: StackOverflowError security vulnerability on file: " + filename);
LOGGER.severe(() -> "ERROR: Failed to compile JSON schema: " + schemaUri);
```

#### 3. Performance Warning Prefix
Performance threshold violations use `"PERFORMANCE WARNING:"` prefix at FINE level:
```java
LOGGER.fine(() -> "PERFORMANCE WARNING: Validation stack processing " + count +
" items exceeds recommended threshold of " + threshold);
```

#### 4. Oracle JUL Level Hierarchy
- **SEVERE (1000)**: Serious failures preventing normal execution (+ ERROR prefix when appropriate)
- **WARNING (900)**: Potential problems for end users/system managers
- **INFO (800)**: Significant messages for end users/administrators (use sparingly)
- **CONFIG (700)**: Static configuration information for debugging setup issues
- **FINE (500)**: Developer debugging info, minor recoverable failures, performance warnings
- **FINER (400)**: Detailed tracing, method entry/exit, conditional validation branches
- **FINEST (300)**: Maximum detail debugging, stack operations, branch processing

#### 5. Hierarchical Logger Naming
All loggers use fully-qualified class names:
```java
private static final Logger LOGGER = Logger.getLogger(MyClass.class.getName());
```

### Example Usage
```java
// SEVERE with ERROR prefix - prevents normal execution
LOGGER.severe(() -> "ERROR: Failed to parse JSON document: " + error);

// WARNING - recoverable issue
LOGGER.warning(() -> "Deprecated encoding detected, using compatibility mode");

// INFO - significant but not noisy
LOGGER.info(() -> "JSON parser initialized with " + features.size() + " features");

// CONFIG - configuration details
LOGGER.config(() -> "Validation features enabled: " + Arrays.toString(enabledFeatures));

// FINE - developer debugging and performance warnings
LOGGER.fine(() -> "PERFORMANCE WARNING: Deep recursion detected at depth " + depth);
LOGGER.fine(() -> "Cache miss for schema: " + schemaId);

// FINER - detailed tracing
LOGGER.finer(() -> "Entering validation method for schema: " + schemaType);

// FINEST - maximum detail (supplier is never invoked when the level is disabled)
LOGGER.finest(() -> "Stack frame: " + frameDetails);
```

## Releasing to Maven Central

Prerequisites
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ private TestResults runTests() throws Exception {
content = Files.readString(file, StandardCharsets.UTF_8);
charContent = content.toCharArray();
} catch (MalformedInputException e) {
LOGGER.warning("UTF-8 failed for " + filename + ", using robust encoding detection");
LOGGER.warning(() -> "UTF-8 failed for " + filename + ", using robust encoding detection");
try {
byte[] rawBytes = Files.readAllBytes(file);
charContent = RobustCharDecoder.decodeToChars(rawBytes, filename);
Expand All @@ -123,7 +123,7 @@ private TestResults runTests() throws Exception {
} catch (JsonParseException e) {
parseSucceeded = false;
} catch (StackOverflowError e) {
LOGGER.warning("StackOverflowError on file: " + filename);
LOGGER.severe(() -> "ERROR: StackOverflowError security vulnerability on file: " + filename);
parseSucceeded = false; // Treat as parse failure
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,12 +29,12 @@ class RobustCharDecoder {
/// @param filename filename for logging purposes
/// @return char array representing the content
static char[] decodeToChars(byte[] rawBytes, String filename) {
LOGGER.fine("Attempting robust decoding for " + filename + " (" + rawBytes.length + " bytes)");
LOGGER.fine(() -> "Attempting robust decoding for " + filename + " (" + rawBytes.length + " bytes)");

// Stage 1: BOM Detection
BomResult bom = detectBOM(rawBytes);
if (bom.encoding != null) {
LOGGER.fine("BOM detected for " + filename + ": " + bom.encoding.name());
LOGGER.fine(() -> "BOM detected for " + filename + ": " + bom.encoding.name());
char[] result = tryDecodeWithCharset(rawBytes, bom.offset, bom.encoding, filename);
if (result != null) {
return result;
Expand All @@ -53,13 +53,13 @@ static char[] decodeToChars(byte[] rawBytes, String filename) {
for (Charset encoding : encodings) {
char[] result = tryDecodeWithCharset(rawBytes, 0, encoding, filename);
if (result != null) {
LOGGER.fine("Successfully decoded " + filename + " using " + encoding.name());
LOGGER.fine(() -> "Successfully decoded " + filename + " using " + encoding.name());
return result;
}
}

// Stage 3: Byte-level conversion with UTF-8 sequence awareness
LOGGER.fine("Using permissive byte-to-char conversion for " + filename);
LOGGER.fine(() -> "Using permissive byte-to-char conversion for " + filename);
return convertBytesToCharsPermissively(rawBytes);
}

Expand Down Expand Up @@ -96,10 +96,10 @@ private static char[] tryDecodeWithCharset(byte[] bytes, int offset, Charset cha
return result;

} catch (CharacterCodingException e) {
LOGGER.fine("Failed to decode " + filename + " with " + charset.name() + ": " + e.getMessage());
LOGGER.fine(() -> "Failed to decode " + filename + " with " + charset.name() + ": " + e.getMessage());
return null;
} catch (Exception e) {
LOGGER.fine("Unexpected error decoding " + filename + " with " + charset.name() + ": " + e.getMessage());
LOGGER.fine(() -> "Unexpected error decoding " + filename + " with " + charset.name() + ": " + e.getMessage());
return null;
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ private DynamicTest createTest(Path file) {
content = Files.readString(file, StandardCharsets.UTF_8);
charContent = content.toCharArray();
} catch (MalformedInputException e) {
LOGGER.warning("UTF-8 failed for " + filename + ", using robust encoding detection");
LOGGER.warning(() -> "UTF-8 failed for " + filename + ", using robust encoding detection");
try {
byte[] rawBytes = Files.readAllBytes(file);
charContent = RobustCharDecoder.decodeToChars(rawBytes, filename);
Expand Down Expand Up @@ -128,8 +128,8 @@ private void testImplementationDefinedSingle(String description, Runnable parseA
} catch (JsonParseException e) {
// OK - we rejected it
} catch (StackOverflowError e) {
// OK - acceptable for deeply nested structures
LOGGER.warning("StackOverflowError on implementation-defined: " + description);
// OK - acceptable for deeply nested structures but still a security concern
LOGGER.severe(() -> "ERROR: StackOverflowError security vulnerability on implementation-defined: " + description);
} catch (Exception e) {
// NOT OK - unexpected exception type
fail("Unexpected exception for %s: %s", description, e);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ static String fetchFromUrl(String url) {
/// Discovers all classes in the local JSON API packages
/// @return sorted set of classes from jdk.sandbox.java.util.json and jdk.sandbox.internal.util.json
static Set<Class<?>> discoverLocalJsonClasses() {
LOGGER.info("Starting class discovery for JSON API packages");
LOGGER.info(() -> "Starting class discovery for JSON API packages");
final var classes = new TreeSet<Class<?>>(Comparator.comparing(Class::getName));

// Packages to scan - only public API, not internal implementation
Expand Down Expand Up @@ -122,11 +122,11 @@ static Set<Class<?>> discoverLocalJsonClasses() {
}
}
} catch (Exception e) {
LOGGER.log(Level.WARNING, "Error scanning package: " + packageName, e);
LOGGER.warning(() -> "ERROR: Error scanning package: " + packageName + " - " + e.getMessage());
}
}

LOGGER.info("Discovered " + classes.size() + " classes in JSON API packages: " +
LOGGER.info(() -> "Discovered " + classes.size() + " classes in JSON API packages: " +
classes.stream().map(Class::getName).sorted().collect(Collectors.joining(", ")));
return Collections.unmodifiableSet(classes);
}
Expand All @@ -151,7 +151,7 @@ static void scanDirectory(java.io.File directory, String packageName, Set<Class<
try {
final var clazz = Class.forName(className);
classes.add(clazz);
LOGGER.info("Found class: " + className);
LOGGER.info(() -> "Found class: " + className);
} catch (ClassNotFoundException | NoClassDefFoundError e) {
LOGGER.fine(() -> "Could not load class: " + className);
}
Expand Down Expand Up @@ -189,15 +189,15 @@ static void scanJar(java.net.URL jarUrl, String packageName, Set<Class<?>> class
try {
final var clazz = Class.forName(className);
classes.add(clazz);
LOGGER.info("Found class in JAR: " + className);
LOGGER.info(() -> "Found class in JAR: " + className);
} catch (ClassNotFoundException | NoClassDefFoundError e) {
LOGGER.fine(() -> "Could not load class from JAR: " + className);
}
}
}
}
} catch (Exception e) {
LOGGER.log(Level.WARNING, "Error scanning JAR: " + jarUrl, e);
LOGGER.warning(() -> "ERROR: Error scanning JAR: " + jarUrl + " - " + e.getMessage());
}
}

Expand All @@ -206,7 +206,7 @@ static void scanJar(java.net.URL jarUrl, String packageName, Set<Class<?>> class
/// @return map of className to source code (or error message if fetch failed)
static Map<String, String> fetchUpstreamSources(Set<Class<?>> localClasses) {
Objects.requireNonNull(localClasses, "localClasses must not be null");
LOGGER.info("Fetching upstream sources for " + localClasses.size() + " classes");
LOGGER.info(() -> "Fetching upstream sources for " + localClasses.size() + " classes");

final var results = new LinkedHashMap<String, String>();
final var httpClient = HttpClient.newBuilder()
Expand All @@ -227,7 +227,7 @@ static Map<String, String> fetchUpstreamSources(Set<Class<?>> localClasses) {
final var upstreamPath = mapToUpstreamPath(className);
final var url = GITHUB_BASE_URL + upstreamPath;

LOGGER.info("Fetching upstream source: " + url);
LOGGER.info(() -> "Fetching upstream source: " + url);

try {
final var request = HttpRequest.newBuilder()
Expand All @@ -242,20 +242,20 @@ static Map<String, String> fetchUpstreamSources(Set<Class<?>> localClasses) {
final var body = response.body();
FETCH_CACHE.put(className, body);
results.put(className, body);
LOGGER.info("Successfully fetched " + body.length() + " chars for: " + className);
LOGGER.info(() -> "Successfully fetched " + body.length() + " chars for: " + className);
} else if (response.statusCode() == 404) {
final var error = "NOT_FOUND: Upstream file not found (possibly deleted or renamed)";
results.put(className, error);
LOGGER.info("404 Not Found for upstream: " + className + " at " + url);
LOGGER.info(() -> "404 Not Found for upstream: " + className + " at " + url);
} else {
final var error = "HTTP_ERROR: Status " + response.statusCode();
results.put(className, error);
LOGGER.info("HTTP error " + response.statusCode() + " for " + className + " at " + url);
LOGGER.info(() -> "HTTP error " + response.statusCode() + " for " + className + " at " + url);
}
} catch (Exception e) {
final var error = "FETCH_ERROR: " + e.getMessage();
results.put(className, error);
LOGGER.info("Fetch error for " + className + " at " + url + ": " + e.getMessage());
LOGGER.info(() -> "Fetch error for " + className + " at " + url + ": " + e.getMessage());
}
}

Expand Down Expand Up @@ -306,7 +306,7 @@ static JsonObject extractApiFromSource(String sourceCode, String className) {
return JsonObject.of(errorMap);
}

LOGGER.info("Extracting upstream API for: " + className);
LOGGER.info(() -> "Extracting upstream API for: " + className);

final var compiler = ToolProvider.getSystemJavaCompiler();
if (compiler == null) {
Expand Down Expand Up @@ -367,7 +367,7 @@ static JsonObject extractApiFromSource(String sourceCode, String className) {
));

} catch (Exception e) {
LOGGER.log(Level.WARNING, "Error parsing upstream source for " + className, e);
LOGGER.warning(() -> "ERROR: Error parsing upstream source for " + className + " - " + e.getMessage());
return JsonObject.of(Map.of(
"error", JsonString.of("Parse error: " + e.getMessage()),
"className", JsonString.of(className)
Expand All @@ -376,7 +376,7 @@ static JsonObject extractApiFromSource(String sourceCode, String className) {
try {
fileManager.close();
} catch (IOException e) {
LOGGER.log(Level.FINE, "Error closing file manager", e);
LOGGER.fine(() -> "Error closing file manager: " + e.getMessage());
}
}
}
Expand Down Expand Up @@ -877,7 +877,7 @@ static String normalizeTypeName(String typeName) {
/// Runs source-to-source comparison for fair parameter name comparison
/// @return complete comparison report as JSON
static JsonObject runFullComparison() {
LOGGER.info("Starting full API comparison");
LOGGER.info(() -> "Starting full API comparison");
final var startTime = Instant.now();

final var reportMap = new LinkedHashMap<String, JsonValue>();
Expand All @@ -887,7 +887,7 @@ static JsonObject runFullComparison() {

// Discover local classes
final var localClasses = discoverLocalJsonClasses();
LOGGER.info("Found " + localClasses.size() + " local classes");
LOGGER.info(() -> "Found " + localClasses.size() + " local classes");

// Extract and compare APIs
final var differences = new ArrayList<JsonValue>();
Expand Down Expand Up @@ -927,7 +927,7 @@ static JsonObject runFullComparison() {
final var duration = Duration.between(startTime, Instant.now());
reportMap.put("durationMs", JsonNumber.of(duration.toMillis()));

LOGGER.info("Comparison completed in " + duration.toMillis() + "ms");
LOGGER.info(() -> "Comparison completed in " + duration.toMillis() + "ms");

return JsonObject.of(reportMap);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -84,8 +84,19 @@ default ValidationResult validate(JsonValue json) {
Deque<ValidationFrame> stack = new ArrayDeque<>();
stack.push(new ValidationFrame("", this, json));

int frameCount = 0;
final int performanceThreshold = 1000; // Reasonable threshold for stack processing

while (!stack.isEmpty()) {
ValidationFrame frame = stack.pop();
frameCount++;

// Performance warning for deep validation trees
if (frameCount == performanceThreshold) {
LOG.fine(() -> "PERFORMANCE WARNING: Validation stack processing " + frameCount +
" items exceeds recommended threshold of " + performanceThreshold);
}

LOG.finest(() -> "POP " + frame.path() +
" schema=" + frame.schema().getClass().getSimpleName());
ValidationResult result = frame.schema.validateAt(frame.path, frame.json, stack);
Expand Down Expand Up @@ -448,9 +459,12 @@ private static void trace(String stage, JsonValue fragment) {
static JsonSchema compile(JsonValue schemaJson) {
definitions.clear(); // Clear any previous definitions
currentRootSchema = null;
LOG.config(() -> "Compiling JSON Schema with " +
(schemaJson instanceof JsonObject obj ? obj.members().size() + " properties" : "boolean schema"));
trace("compile-start", schemaJson);
JsonSchema schema = compileInternal(schemaJson);
currentRootSchema = schema; // Store the root schema for self-references
LOG.config(() -> "JSON Schema compilation completed - type: " + schema.getClass().getSimpleName());
return schema;
}

Expand Down