From aac56663474c30d3a6821247a2428ca1e94e5542 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 18 Sep 2025 09:34:07 +0000 Subject: [PATCH 1/2] Initial plan From 5c3435fbffb51063992507f0b456f94b66d829b7 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 18 Sep 2025 09:43:06 +0000 Subject: [PATCH 2/2] Issue #34 - Implement JUL logging strategy with lambda expressions and prefixes Co-authored-by: simbo1905 <322608+simbo1905@users.noreply.github.com> --- AGENTS.md | 71 +++++++++++++++++++ .../compatibility/JsonTestSuiteSummary.java | 4 +- .../compatibility/RobustCharDecoder.java | 12 ++-- .../compatibility/JsonTestSuiteTest.java | 6 +- .../github/simbo1905/tracker/ApiTracker.java | 36 +++++----- .../simbo1905/json/schema/JsonSchema.java | 14 ++++ 6 files changed, 114 insertions(+), 29 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 79dad69..76a6304 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -64,6 +64,77 @@ mvn exec:java -pl json-compatibility-suite -Dexec.args="--json" ./mvn-test-no-boilerplate.sh -Dtest=JsonParserTests -Djava.util.logging.ConsoleHandler.level=FINER ``` +## Logging Strategy + +This project uses `java.util.logging` (JUL) following Oracle's official guidelines with practical compatibility enhancements for cloud monitoring and SLF4J/Log4j bridge users. + +### Key Principles + +#### 1. Lambda-Based Logging Only +**ALL logging must use lambda expressions** for zero runtime overhead when log levels are disabled: +```java +// ✅ Correct - JVM optimizes away unused lambda branches +LOGGER.fine(() -> "Debug info: " + expensiveOperation()); + +// ❌ Wrong - string concatenation always executes +LOGGER.fine("Debug info: " + expensiveOperation()); +``` + +#### 2. 
ERROR Prefix for Cloud Monitoring +SEVERE level messages that warrant cloud log monitoring include `"ERROR:"` prefix for SLF4J/Log4j bridge compatibility: +```java +// Security vulnerabilities, parse failures preventing normal execution +LOGGER.severe(() -> "ERROR: StackOverflowError security vulnerability on file: " + filename); +LOGGER.severe(() -> "ERROR: Failed to compile JSON schema: " + schemaUri); +``` + +#### 3. Performance Warning Prefix +Performance threshold violations use `"PERFORMANCE WARNING:"` prefix at FINE level: +```java +LOGGER.fine(() -> "PERFORMANCE WARNING: Validation stack processing " + count + + " items exceeds recommended threshold of " + threshold); +``` + +#### 4. Oracle JUL Level Hierarchy +- **SEVERE (1000)**: Serious failures preventing normal execution (+ ERROR prefix when appropriate) +- **WARNING (900)**: Potential problems for end users/system managers +- **INFO (800)**: Significant messages for end users/administrators (use sparingly) +- **CONFIG (700)**: Static configuration information for debugging setup issues +- **FINE (500)**: Developer debugging info, minor recoverable failures, performance warnings +- **FINER (400)**: Detailed tracing, method entry/exit, conditional validation branches +- **FINEST (300)**: Maximum detail debugging, stack operations, branch processing + +#### 5. 
Hierarchical Logger Naming +All loggers use fully-qualified class names: +```java +private static final Logger LOGGER = Logger.getLogger(MyClass.class.getName()); +``` + +### Example Usage +```java +// SEVERE with ERROR prefix - prevents normal execution +LOGGER.severe(() -> "ERROR: Failed to parse JSON document: " + error); + +// WARNING - recoverable issue +LOGGER.warning(() -> "Deprecated encoding detected, using compatibility mode"); + +// INFO - significant but not noisy +LOGGER.info(() -> "JSON parser initialized with " + features.size() + " features"); + +// CONFIG - configuration details +LOGGER.config(() -> "Validation features enabled: " + Arrays.toString(enabledFeatures)); + +// FINE - developer debugging and performance warnings +LOGGER.fine(() -> "PERFORMANCE WARNING: Deep recursion detected at depth " + depth); +LOGGER.fine(() -> "Cache miss for schema: " + schemaId); + +// FINER - detailed tracing +LOGGER.finer(() -> "Entering validation method for schema: " + schemaType); + +// FINEST - maximum detail (JVM optimizes away when disabled) +LOGGER.finest(() -> "Stack frame: " + frameDetails); +``` + ## Releasing to Maven Central Prerequisites diff --git a/json-compatibility-suite/src/main/java/jdk/sandbox/compatibility/JsonTestSuiteSummary.java b/json-compatibility-suite/src/main/java/jdk/sandbox/compatibility/JsonTestSuiteSummary.java index aeab23a..e9af44d 100644 --- a/json-compatibility-suite/src/main/java/jdk/sandbox/compatibility/JsonTestSuiteSummary.java +++ b/json-compatibility-suite/src/main/java/jdk/sandbox/compatibility/JsonTestSuiteSummary.java @@ -106,7 +106,7 @@ private TestResults runTests() throws Exception { content = Files.readString(file, StandardCharsets.UTF_8); charContent = content.toCharArray(); } catch (MalformedInputException e) { - LOGGER.warning("UTF-8 failed for " + filename + ", using robust encoding detection"); + LOGGER.warning(() -> "UTF-8 failed for " + filename + ", using robust encoding detection"); try { byte[] 
rawBytes = Files.readAllBytes(file); charContent = RobustCharDecoder.decodeToChars(rawBytes, filename); @@ -123,7 +123,7 @@ private TestResults runTests() throws Exception { } catch (JsonParseException e) { parseSucceeded = false; } catch (StackOverflowError e) { - LOGGER.warning("StackOverflowError on file: " + filename); + LOGGER.severe(() -> "ERROR: StackOverflowError security vulnerability on file: " + filename); parseSucceeded = false; // Treat as parse failure } diff --git a/json-compatibility-suite/src/main/java/jdk/sandbox/compatibility/RobustCharDecoder.java b/json-compatibility-suite/src/main/java/jdk/sandbox/compatibility/RobustCharDecoder.java index deb4607..a7eb308 100644 --- a/json-compatibility-suite/src/main/java/jdk/sandbox/compatibility/RobustCharDecoder.java +++ b/json-compatibility-suite/src/main/java/jdk/sandbox/compatibility/RobustCharDecoder.java @@ -29,12 +29,12 @@ class RobustCharDecoder { /// @param filename filename for logging purposes /// @return char array representing the content static char[] decodeToChars(byte[] rawBytes, String filename) { - LOGGER.fine("Attempting robust decoding for " + filename + " (" + rawBytes.length + " bytes)"); + LOGGER.fine(() -> "Attempting robust decoding for " + filename + " (" + rawBytes.length + " bytes)"); // Stage 1: BOM Detection BomResult bom = detectBOM(rawBytes); if (bom.encoding != null) { - LOGGER.fine("BOM detected for " + filename + ": " + bom.encoding.name()); + LOGGER.fine(() -> "BOM detected for " + filename + ": " + bom.encoding.name()); char[] result = tryDecodeWithCharset(rawBytes, bom.offset, bom.encoding, filename); if (result != null) { return result; @@ -53,13 +53,13 @@ static char[] decodeToChars(byte[] rawBytes, String filename) { for (Charset encoding : encodings) { char[] result = tryDecodeWithCharset(rawBytes, 0, encoding, filename); if (result != null) { - LOGGER.fine("Successfully decoded " + filename + " using " + encoding.name()); + LOGGER.fine(() -> "Successfully decoded 
" + filename + " using " + encoding.name()); return result; } } // Stage 3: Byte-level conversion with UTF-8 sequence awareness - LOGGER.fine("Using permissive byte-to-char conversion for " + filename); + LOGGER.fine(() -> "Using permissive byte-to-char conversion for " + filename); return convertBytesToCharsPermissively(rawBytes); } @@ -96,10 +96,10 @@ private static char[] tryDecodeWithCharset(byte[] bytes, int offset, Charset cha return result; } catch (CharacterCodingException e) { - LOGGER.fine("Failed to decode " + filename + " with " + charset.name() + ": " + e.getMessage()); + LOGGER.fine(() -> "Failed to decode " + filename + " with " + charset.name() + ": " + e.getMessage()); return null; } catch (Exception e) { - LOGGER.fine("Unexpected error decoding " + filename + " with " + charset.name() + ": " + e.getMessage()); + LOGGER.fine(() -> "Unexpected error decoding " + filename + " with " + charset.name() + ": " + e.getMessage()); return null; } } diff --git a/json-compatibility-suite/src/test/java/jdk/sandbox/compatibility/JsonTestSuiteTest.java b/json-compatibility-suite/src/test/java/jdk/sandbox/compatibility/JsonTestSuiteTest.java index 88a04bc..a46b809 100644 --- a/json-compatibility-suite/src/test/java/jdk/sandbox/compatibility/JsonTestSuiteTest.java +++ b/json-compatibility-suite/src/test/java/jdk/sandbox/compatibility/JsonTestSuiteTest.java @@ -51,7 +51,7 @@ private DynamicTest createTest(Path file) { content = Files.readString(file, StandardCharsets.UTF_8); charContent = content.toCharArray(); } catch (MalformedInputException e) { - LOGGER.warning("UTF-8 failed for " + filename + ", using robust encoding detection"); + LOGGER.warning(() -> "UTF-8 failed for " + filename + ", using robust encoding detection"); try { byte[] rawBytes = Files.readAllBytes(file); charContent = RobustCharDecoder.decodeToChars(rawBytes, filename); @@ -128,8 +128,8 @@ private void testImplementationDefinedSingle(String description, Runnable parseA } catch 
(JsonParseException e) { // OK - we rejected it } catch (StackOverflowError e) { - // OK - acceptable for deeply nested structures - LOGGER.warning("StackOverflowError on implementation-defined: " + description); + // OK - acceptable for deeply nested structures but still a security concern + LOGGER.severe(() -> "ERROR: StackOverflowError security vulnerability on implementation-defined: " + description); } catch (Exception e) { // NOT OK - unexpected exception type fail("Unexpected exception for %s: %s", description, e); diff --git a/json-java21-api-tracker/src/main/java/io/github/simbo1905/tracker/ApiTracker.java b/json-java21-api-tracker/src/main/java/io/github/simbo1905/tracker/ApiTracker.java index 2596c39..df18509 100644 --- a/json-java21-api-tracker/src/main/java/io/github/simbo1905/tracker/ApiTracker.java +++ b/json-java21-api-tracker/src/main/java/io/github/simbo1905/tracker/ApiTracker.java @@ -94,7 +94,7 @@ static String fetchFromUrl(String url) { /// Discovers all classes in the local JSON API packages /// @return sorted set of classes from jdk.sandbox.java.util.json and jdk.sandbox.internal.util.json static Set> discoverLocalJsonClasses() { - LOGGER.info("Starting class discovery for JSON API packages"); + LOGGER.info(() -> "Starting class discovery for JSON API packages"); final var classes = new TreeSet>(Comparator.comparing(Class::getName)); // Packages to scan - only public API, not internal implementation @@ -122,11 +122,11 @@ static Set> discoverLocalJsonClasses() { } } } catch (Exception e) { - LOGGER.log(Level.WARNING, "Error scanning package: " + packageName, e); + LOGGER.warning(() -> "ERROR: Error scanning package: " + packageName + " - " + e.getMessage()); } } - LOGGER.info("Discovered " + classes.size() + " classes in JSON API packages: " + + LOGGER.info(() -> "Discovered " + classes.size() + " classes in JSON API packages: " + classes.stream().map(Class::getName).sorted().collect(Collectors.joining(", "))); return 
Collections.unmodifiableSet(classes); } @@ -151,7 +151,7 @@ static void scanDirectory(java.io.File directory, String packageName, Set "Found class: " + className); } catch (ClassNotFoundException | NoClassDefFoundError e) { LOGGER.fine(() -> "Could not load class: " + className); } @@ -189,7 +189,7 @@ static void scanJar(java.net.URL jarUrl, String packageName, Set> class try { final var clazz = Class.forName(className); classes.add(clazz); - LOGGER.info("Found class in JAR: " + className); + LOGGER.info(() -> "Found class in JAR: " + className); } catch (ClassNotFoundException | NoClassDefFoundError e) { LOGGER.fine(() -> "Could not load class from JAR: " + className); } @@ -197,7 +197,7 @@ static void scanJar(java.net.URL jarUrl, String packageName, Set> class } } } catch (Exception e) { - LOGGER.log(Level.WARNING, "Error scanning JAR: " + jarUrl, e); + LOGGER.warning(() -> "ERROR: Error scanning JAR: " + jarUrl + " - " + e.getMessage()); } } @@ -206,7 +206,7 @@ static void scanJar(java.net.URL jarUrl, String packageName, Set> class /// @return map of className to source code (or error message if fetch failed) static Map fetchUpstreamSources(Set> localClasses) { Objects.requireNonNull(localClasses, "localClasses must not be null"); - LOGGER.info("Fetching upstream sources for " + localClasses.size() + " classes"); + LOGGER.info(() -> "Fetching upstream sources for " + localClasses.size() + " classes"); final var results = new LinkedHashMap(); final var httpClient = HttpClient.newBuilder() @@ -227,7 +227,7 @@ static Map fetchUpstreamSources(Set> localClasses) { final var upstreamPath = mapToUpstreamPath(className); final var url = GITHUB_BASE_URL + upstreamPath; - LOGGER.info("Fetching upstream source: " + url); + LOGGER.info(() -> "Fetching upstream source: " + url); try { final var request = HttpRequest.newBuilder() @@ -242,20 +242,20 @@ static Map fetchUpstreamSources(Set> localClasses) { final var body = response.body(); FETCH_CACHE.put(className, body); 
results.put(className, body); - LOGGER.info("Successfully fetched " + body.length() + " chars for: " + className); + LOGGER.info(() -> "Successfully fetched " + body.length() + " chars for: " + className); } else if (response.statusCode() == 404) { final var error = "NOT_FOUND: Upstream file not found (possibly deleted or renamed)"; results.put(className, error); - LOGGER.info("404 Not Found for upstream: " + className + " at " + url); + LOGGER.info(() -> "404 Not Found for upstream: " + className + " at " + url); } else { final var error = "HTTP_ERROR: Status " + response.statusCode(); results.put(className, error); - LOGGER.info("HTTP error " + response.statusCode() + " for " + className + " at " + url); + LOGGER.info(() -> "HTTP error " + response.statusCode() + " for " + className + " at " + url); } } catch (Exception e) { final var error = "FETCH_ERROR: " + e.getMessage(); results.put(className, error); - LOGGER.info("Fetch error for " + className + " at " + url + ": " + e.getMessage()); + LOGGER.info(() -> "Fetch error for " + className + " at " + url + ": " + e.getMessage()); } } @@ -306,7 +306,7 @@ static JsonObject extractApiFromSource(String sourceCode, String className) { return JsonObject.of(errorMap); } - LOGGER.info("Extracting upstream API for: " + className); + LOGGER.info(() -> "Extracting upstream API for: " + className); final var compiler = ToolProvider.getSystemJavaCompiler(); if (compiler == null) { @@ -367,7 +367,7 @@ static JsonObject extractApiFromSource(String sourceCode, String className) { )); } catch (Exception e) { - LOGGER.log(Level.WARNING, "Error parsing upstream source for " + className, e); + LOGGER.warning(() -> "ERROR: Error parsing upstream source for " + className + " - " + e.getMessage()); return JsonObject.of(Map.of( "error", JsonString.of("Parse error: " + e.getMessage()), "className", JsonString.of(className) @@ -376,7 +376,7 @@ static JsonObject extractApiFromSource(String sourceCode, String className) { try { 
fileManager.close(); } catch (IOException e) { - LOGGER.log(Level.FINE, "Error closing file manager", e); + LOGGER.fine(() -> "Error closing file manager: " + e.getMessage()); } } } @@ -877,7 +877,7 @@ static String normalizeTypeName(String typeName) { /// Runs source-to-source comparison for fair parameter name comparison /// @return complete comparison report as JSON static JsonObject runFullComparison() { - LOGGER.info("Starting full API comparison"); + LOGGER.info(() -> "Starting full API comparison"); final var startTime = Instant.now(); final var reportMap = new LinkedHashMap(); @@ -887,7 +887,7 @@ static JsonObject runFullComparison() { // Discover local classes final var localClasses = discoverLocalJsonClasses(); - LOGGER.info("Found " + localClasses.size() + " local classes"); + LOGGER.info(() -> "Found " + localClasses.size() + " local classes"); // Extract and compare APIs final var differences = new ArrayList(); @@ -927,7 +927,7 @@ static JsonObject runFullComparison() { final var duration = Duration.between(startTime, Instant.now()); reportMap.put("durationMs", JsonNumber.of(duration.toMillis())); - LOGGER.info("Comparison completed in " + duration.toMillis() + "ms"); + LOGGER.info(() -> "Comparison completed in " + duration.toMillis() + "ms"); return JsonObject.of(reportMap); } diff --git a/json-java21-schema/src/main/java/io/github/simbo1905/json/schema/JsonSchema.java b/json-java21-schema/src/main/java/io/github/simbo1905/json/schema/JsonSchema.java index 3c3c7ad..1696103 100644 --- a/json-java21-schema/src/main/java/io/github/simbo1905/json/schema/JsonSchema.java +++ b/json-java21-schema/src/main/java/io/github/simbo1905/json/schema/JsonSchema.java @@ -84,8 +84,19 @@ default ValidationResult validate(JsonValue json) { Deque stack = new ArrayDeque<>(); stack.push(new ValidationFrame("", this, json)); + int frameCount = 0; + final int performanceThreshold = 1000; // Reasonable threshold for stack processing + while (!stack.isEmpty()) { 
       ValidationFrame frame = stack.pop();
+      frameCount++;
+
+      // Performance warning for deep validation trees (fires exactly once, at the threshold)
+      if (frameCount == performanceThreshold) {
+        LOG.fine(() -> "PERFORMANCE WARNING: Validation stack processing " + performanceThreshold +
+            " items exceeds recommended threshold of " + performanceThreshold);
+      }
+
       LOG.finest(() -> "POP " + frame.path() + " schema=" + frame.schema().getClass().getSimpleName());
 
       ValidationResult result = frame.schema.validateAt(frame.path, frame.json, stack);
@@ -448,9 +459,12 @@ private static void trace(String stage, JsonValue fragment) {
   static JsonSchema compile(JsonValue schemaJson) {
     definitions.clear(); // Clear any previous definitions
     currentRootSchema = null;
+    LOG.config(() -> "Compiling JSON Schema with " +
+        (schemaJson instanceof JsonObject obj ? obj.members().size() + " properties" : "boolean schema"));
     trace("compile-start", schemaJson);
     JsonSchema schema = compileInternal(schemaJson);
     currentRootSchema = schema; // Store the root schema for self-references
+    LOG.config(() -> "JSON Schema compilation completed - type: " + schema.getClass().getSimpleName());
     return schema;
   }