This is an automated email from the ASF dual-hosted git repository.

robertlazarski pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/axis-axis2-java-core.git


The following commit(s) were added to refs/heads/master by this push:
     new 64f835429c Add recursive multi-level dot-notation field filtering
64f835429c is described below

commit 64f835429cd0591deb319b78308c51102622edbf
Author: Robert Lazarski <[email protected]>
AuthorDate: Tue Apr 21 17:07:16 2026 -1000

    Add recursive multi-level dot-notation field filtering
    
    Extends ?fields= to support arbitrary depth: ?fields=data.calcs.ticker
    walks into "data" (Map), then "calcs" (List of Maps), then filters
    each element to keep only "ticker" — removing 126 of 127 fields per
    element without any service-side changes.
    
    The key change: writeFilteredNested now re-parses its sub-fields for
    dots at each level and recurses. A new writeFilteredMap method handles
    Map<String, Object> containers with recursive descent — the pattern
    used by JSON-RPC services that return parsed JSON as nested Maps.
    
    This enables 97%+ payload reduction on services returning large nested
    Map/Collection structures (e.g., 5.4MB response with 127 fields per
    record filtered to ~150KB keeping 5 fields).
    
    New unit tests (3 tests modeling Map-of-List-of-Map pattern):
    - testTwoLevelDotNotationFiltersMapsRecursively
    - testTwoLevelDotNotationMultipleSubFields
    - testTwoLevelDotNotation97PercentReduction
    
    The Axis2/C implementation supports single-level dot-notation only.
    Multi-level is an Axis2/Java extension documented in the Javadoc.
    
    Co-Authored-By: Claude Opus 4.6 (1M context) <[email protected]>
---
 .../streaming/JSONStreamingMessageFormatter.java   | 102 +++++++---
 .../streaming/MoshiStreamingMessageFormatter.java  | 211 +++++++++++++++------
 .../FieldFilteringMessageFormatterTest.java        | 156 ++++++++++++++-
 3 files changed, 375 insertions(+), 94 deletions(-)

diff --git 
a/modules/json/src/org/apache/axis2/json/streaming/JSONStreamingMessageFormatter.java
 
b/modules/json/src/org/apache/axis2/json/streaming/JSONStreamingMessageFormatter.java
index 7a49cf1e36..2214a0ce45 100644
--- 
a/modules/json/src/org/apache/axis2/json/streaming/JSONStreamingMessageFormatter.java
+++ 
b/modules/json/src/org/apache/axis2/json/streaming/JSONStreamingMessageFormatter.java
@@ -329,63 +329,110 @@ public class JSONStreamingMessageFormatter implements 
MessageFormatter {
     }
 
     /**
-     * Serialize a nested field (Collection, Map, or single POJO) with only
-     * the specified sub-fields. GSON equivalent of the Moshi
-     * {@code writeFilteredNested} method.
+     * Serialize a nested field with recursive dot-notation support (GSON).
+     * Mirrors the Moshi {@code writeFilteredNested} — parses sub-fields
+     * for dots at each level and recurses into Maps, Collections, and POJOs.
      */
     private void writeFilteredNestedGson(JsonWriter jsonWriter, Object value,
                                          Set<String> subFields, Gson gson,
                                          java.util.Map<Class<?>, List<Field>> 
fieldCache)
             throws IOException {
 
+        // Parse sub-fields into immediate keeps and deeper specs
+        Set<String> immediateKeep = new LinkedHashSet<>();
+        java.util.Map<String, Set<String>> deeperSpecs = new 
java.util.LinkedHashMap<>();
+        for (String spec : subFields) {
+            int dot = spec.indexOf('.');
+            if (dot > 0 && dot < spec.length() - 1) {
+                String container = spec.substring(0, dot);
+                String remainder = spec.substring(dot + 1);
+                immediateKeep.add(container);
+                deeperSpecs.computeIfAbsent(container, k -> new 
LinkedHashSet<>())
+                    .add(remainder);
+            } else {
+                immediateKeep.add(spec);
+            }
+        }
+
         if (value instanceof java.util.Collection) {
             jsonWriter.beginArray();
             for (Object element : (java.util.Collection<?>) value) {
                 if (element == null) {
                     jsonWriter.nullValue();
-                } else {
-                    writeFilteredSingleObjectGson(jsonWriter, element, 
subFields,
+                } else if (element instanceof java.util.Map) {
+                    writeFilteredMapGson(jsonWriter, (java.util.Map<?, ?>) 
element,
+                        immediateKeep, deeperSpecs, gson, fieldCache);
+                } else if (element instanceof java.util.Collection) {
+                    writeFilteredNestedGson(jsonWriter, element, subFields,
                         gson, fieldCache);
+                } else {
+                    writeFilteredPojoGson(jsonWriter, element,
+                        immediateKeep, deeperSpecs, gson, fieldCache);
                 }
             }
             jsonWriter.endArray();
         } else if (value instanceof java.util.Map) {
-            jsonWriter.beginObject();
-            for (java.util.Map.Entry<?, ?> entry : ((java.util.Map<?, ?>) 
value).entrySet()) {
-                String key = String.valueOf(entry.getKey());
-                if (subFields.contains(key)) {
-                    jsonWriter.name(key);
-                    gson.toJson(entry.getValue(), Object.class, jsonWriter);
-                }
-            }
-            jsonWriter.endObject();
+            writeFilteredMapGson(jsonWriter, (java.util.Map<?, ?>) value,
+                immediateKeep, deeperSpecs, gson, fieldCache);
         } else if (value.getClass().getName().startsWith("java.lang.")) {
             gson.toJson(value, value.getClass(), jsonWriter);
         } else {
-            writeFilteredSingleObjectGson(jsonWriter, value, subFields, gson, 
fieldCache);
+            writeFilteredPojoGson(jsonWriter, value,
+                immediateKeep, deeperSpecs, gson, fieldCache);
         }
     }
 
     /**
-     * Serialize a single object with only the specified fields using GSON.
-     * Inner loop of nested filtering — called once per collection element.
+     * Serialize a Map with recursive field filtering (GSON).
+     * Mirrors the Moshi {@code writeFilteredMap}.
      */
-    private void writeFilteredSingleObjectGson(JsonWriter jsonWriter, Object 
obj,
-                                               Set<String> allowedFields, Gson 
gson,
-                                               java.util.Map<Class<?>, 
List<Field>> fieldCache)
+    private void writeFilteredMapGson(JsonWriter jsonWriter, java.util.Map<?, 
?> map,
+                                      Set<String> immediateKeep,
+                                      java.util.Map<String, Set<String>> 
deeperSpecs,
+                                      Gson gson,
+                                      java.util.Map<Class<?>, List<Field>> 
fieldCache)
+            throws IOException {
+
+        jsonWriter.beginObject();
+        for (java.util.Map.Entry<?, ?> entry : map.entrySet()) {
+            String key = String.valueOf(entry.getKey());
+            if (!immediateKeep.contains(key)) continue;
+
+            jsonWriter.name(key);
+            Object entryValue = entry.getValue();
+            Set<String> deeper = deeperSpecs.get(key);
+            if (deeper != null && entryValue != null) {
+                writeFilteredNestedGson(jsonWriter, entryValue, deeper, gson, 
fieldCache);
+            } else if (entryValue == null) {
+                jsonWriter.nullValue();
+            } else {
+                gson.toJson(entryValue, Object.class, jsonWriter);
+            }
+        }
+        jsonWriter.endObject();
+    }
+
+    /**
+     * Serialize a POJO with recursive field filtering (GSON).
+     * Mirrors the Moshi {@code writeFilteredPojo}.
+     */
+    private void writeFilteredPojoGson(JsonWriter jsonWriter, Object pojo,
+                                       Set<String> immediateKeep,
+                                       java.util.Map<String, Set<String>> 
deeperSpecs,
+                                       Gson gson,
+                                       java.util.Map<Class<?>, List<Field>> 
fieldCache)
             throws IOException {
 
         List<Field> fields = fieldCache.computeIfAbsent(
-            obj.getClass(), JSONStreamingMessageFormatter::getAllFields);
+            pojo.getClass(), JSONStreamingMessageFormatter::getAllFields);
         jsonWriter.beginObject();
         for (Field field : fields) {
-            if (!allowedFields.contains(field.getName())) {
-                continue;
-            }
+            if (!immediateKeep.contains(field.getName())) continue;
+
             Object value;
             try {
                 field.setAccessible(true);
-                value = field.get(obj);
+                value = field.get(pojo);
             } catch (IllegalAccessException | SecurityException e) {
                 log.warn("Cannot access field "
                     + field.getDeclaringClass().getName().replaceAll("[\r\n]", 
"_")
@@ -394,7 +441,10 @@ public class JSONStreamingMessageFormatter implements 
MessageFormatter {
                 continue;
             }
             jsonWriter.name(field.getName());
-            if (value == null) {
+            Set<String> deeper = deeperSpecs != null ? 
deeperSpecs.get(field.getName()) : null;
+            if (deeper != null && value != null) {
+                writeFilteredNestedGson(jsonWriter, value, deeper, gson, 
fieldCache);
+            } else if (value == null) {
                 jsonWriter.nullValue();
             } else {
                 gson.toJson(value, field.getGenericType(), jsonWriter);
diff --git 
a/modules/json/src/org/apache/axis2/json/streaming/MoshiStreamingMessageFormatter.java
 
b/modules/json/src/org/apache/axis2/json/streaming/MoshiStreamingMessageFormatter.java
index c98b7caa51..b966b832f5 100644
--- 
a/modules/json/src/org/apache/axis2/json/streaming/MoshiStreamingMessageFormatter.java
+++ 
b/modules/json/src/org/apache/axis2/json/streaming/MoshiStreamingMessageFormatter.java
@@ -276,12 +276,17 @@ public class MoshiStreamingMessageFormatter implements 
MessageFormatter {
      * bindings. The streaming pipeline (Moshi → Okio → FlushingOutputStream)
      * is preserved — no capture buffer is used.</p>
      *
-     * <p><b>Nesting depth:</b> One level of dot-notation is supported
-     * ({@code container.field}). Multi-level paths like {@code a.b.c} are
-     * not supported — the sub-field {@code "b.c"} is treated as a literal
-     * field name, not a nested path. This matches the Axis2/C implementation
-     * and covers the primary use case of filtering wide objects inside a
-     * top-level collection.</p>
+     * <p><b>Nesting depth:</b> Multi-level dot-notation is supported.
+     * {@code ?fields=data.records.id} walks three levels deep:
+     * keep "data" at top level, keep "records" inside data, keep
+     * "id" inside each records element. This enables filtering
+     * inside deeply nested Map/Collection structures without requiring
+     * service-side changes. The Axis2/C implementation supports single-
+     * level dot-notation only.</p>
+     *
+     * <p><b>Limitation:</b> Field names that contain a literal dot character
+     * cannot be selected, as the dot is always interpreted as a nesting
+     * delimiter.</p>
      */
     private void writeFilteredObject(JsonWriter jsonWriter, Object retObj,
                                      Set<String> allowedFields)
@@ -419,104 +424,184 @@ public class MoshiStreamingMessageFormatter implements 
MessageFormatter {
      * <p>Designed to handle both object and array containers, compatible
      * with nested field filtering logic in other Axis2 language bindings.</p>
      */
+    /**
+     * Serialize a nested field with recursive dot-notation support.
+     *
+     * <p>Sub-fields may themselves contain dots for multi-level filtering.
+     * For example, with the JSON-RPC service response pattern:</p>
+     * <pre>{@code
+     * {"response": {
+     *     "status": "SUCCESS",
+     *     "data": {                          // Map<String, Object>
+     *       "records": [            // List<Map<String, Object>>
+     *         {"id":"item-1", "name":"Widget A", ... 125 more ...},
+     *         {"id":"item-2", "name":"Widget B", ... 125 more ...}
+     *       ],
+     *       "notes": [...],
+     *       "diagnostics": {...}
+     *     }
+     * }}
+     * }</pre>
+     *
+     * <p>The query {@code ?fields=status,data.records.id}
+     * produces:</p>
+     * <ol>
+     *   <li>Top level: keep "status" and "data"</li>
+     *   <li>Inside "data": keep only "records"</li>
+     *   <li>Inside each "records" element: keep only "id"</li>
+     * </ol>
+     *
+     * <p>Result: {@code {"response":{"status":"SUCCESS","data":
+     * {"records":[{"id":"item-1"},{"id":"item-2"}]}}}}</p>
+     */
     @SuppressWarnings("unchecked")
     private void writeFilteredNested(JsonWriter jsonWriter, Object value,
                                      Set<String> subFields, Type declaredType,
                                      java.util.Map<Class<?>, List<Field>> 
fieldCache)
             throws IOException {
 
+        /*
+         * Before processing, check if any sub-fields contain dots —
+         * meaning we need to recurse deeper. Parse into immediate-level
+         * keeps and deeper nested specs, same pattern as writeFilteredObject.
+         *
+         * Example: subFields = {"records.id", "records.name"}
+         *   immediateKeep = {"records"}
+         *   deeperSpecs   = {"records" -> {"id", "name"}}
+         */
+        Set<String> immediateKeep = new LinkedHashSet<>();
+        java.util.Map<String, Set<String>> deeperSpecs = new 
java.util.LinkedHashMap<>();
+
+        for (String spec : subFields) {
+            int dot = spec.indexOf('.');
+            if (dot > 0 && dot < spec.length() - 1) {
+                String container = spec.substring(0, dot);
+                String remainder = spec.substring(dot + 1);
+                immediateKeep.add(container);
+                deeperSpecs.computeIfAbsent(container, k -> new 
LinkedHashSet<>())
+                    .add(remainder);
+            } else {
+                immediateKeep.add(spec);
+            }
+        }
+
         if (value instanceof java.util.Collection) {
             /*
-             * Array of objects — the primary use case.
-             *
-             * Example: List<Record> with 127 fields per element.
-             * With subFields = {"id", "name"}, each element is filtered
-             * from 127 fields down to 2. The array structure is preserved.
+             * Array of objects — filter each element independently.
+             * If there are deeper specs, each element is filtered recursively.
              */
             jsonWriter.beginArray();
             for (Object element : (java.util.Collection<?>) value) {
                 if (element == null) {
                     jsonWriter.nullValue();
+                } else if (element instanceof java.util.Map) {
+                    writeFilteredMap(jsonWriter, (java.util.Map<?, ?>) element,
+                        immediateKeep, deeperSpecs, fieldCache);
+                } else if (element instanceof java.util.Collection) {
+                    // Nested collection — recurse with the same sub-fields
+                    writeFilteredNested(jsonWriter, element, subFields,
+                        Object.class, fieldCache);
                 } else {
-                    writeFilteredSingleObject(jsonWriter, element, subFields, 
fieldCache);
+                    writeFilteredPojo(jsonWriter, element,
+                        immediateKeep, deeperSpecs, fieldCache);
                 }
             }
             jsonWriter.endArray();
 
         } else if (value instanceof java.util.Map) {
             /*
-             * Map — filter by key name.
-             *
-             * Example: Map<String, Object> with keys "id", "name", 
"category", ...
-             * With subFields = {"id"}, only the "id" entry is written.
+             * Map — the JSON-RPC service pattern. The "data" field is a 
Map<String, Object>
+             * where keys are "records", "metadata", "diagnostics", etc.
+             * Filter keys by immediateKeep, then recurse into deeperSpecs.
              */
-            jsonWriter.beginObject();
-            for (java.util.Map.Entry<?, ?> entry : ((java.util.Map<?, ?>) 
value).entrySet()) {
-                String key = String.valueOf(entry.getKey());
-                if (subFields.contains(key)) {
-                    jsonWriter.name(key);
-                    JsonAdapter<Object> valAdapter =
-                        (JsonAdapter<Object>) 
FIELD_FILTER_MOSHI.adapter(Object.class);
-                    valAdapter.toJson(jsonWriter, entry.getValue());
-                }
-            }
-            jsonWriter.endObject();
+            writeFilteredMap(jsonWriter, (java.util.Map<?, ?>) value,
+                immediateKeep, deeperSpecs, fieldCache);
 
         } else if (value.getClass().getName().startsWith("java.lang.")) {
-            /*
-             * Scalar (String, Integer, Double, etc.) — nothing to filter
-             * inside a primitive value. Serialize as-is.
-             *
-             * This handles the edge case of ?fields=status.foo where "status"
-             * is a String, not an object with sub-fields.
-             */
+            /* Scalar — nothing to filter inside. */
             JsonAdapter<Object> adapter =
                 (JsonAdapter<Object>) FIELD_FILTER_MOSHI.adapter(declaredType);
             adapter.toJson(jsonWriter, value);
 
         } else {
-            /*
-             * Single POJO — filter its declared fields.
-             *
-             * Example: a response with a single nested object (not an array):
-             * {"results": {"id":"item-1", "name":"Widget A", ...}}
-             * With subFields = {"id"}, outputs {"id":"item-1"}.
-             */
-            writeFilteredSingleObject(jsonWriter, value, subFields, 
fieldCache);
+            /* Single POJO — filter its declared fields recursively. */
+            writeFilteredPojo(jsonWriter, value, immediateKeep, deeperSpecs, 
fieldCache);
         }
     }
 
     /**
-     * Serialize a single object with only the specified fields included.
+     * Serialize a Map with field filtering and recursive dot-notation.
      *
-     * <p>Used for both standalone nested objects and individual elements
-     * within a filtered collection. Uses the request-scoped field cache
-     * to avoid repeated reflection on the same class — critical when
-     * filtering a 500-element collection where every element is the
-     * same type.</p>
+     * <p>This is the core of JSON-RPC service response filtering. A Map like
+     * {@code {"records":[...], "metadata":[...], "diagnostics":{...}}}
+     * is filtered to keep only the keys in {@code immediateKeep}. For keys
+     * that have {@code deeperSpecs}, the value is recursively filtered.</p>
+     */
+    @SuppressWarnings("unchecked")
+    private void writeFilteredMap(JsonWriter jsonWriter, java.util.Map<?, ?> 
map,
+                                  Set<String> immediateKeep,
+                                  java.util.Map<String, Set<String>> 
deeperSpecs,
+                                  java.util.Map<Class<?>, List<Field>> 
fieldCache)
+            throws IOException {
+
+        jsonWriter.beginObject();
+        for (java.util.Map.Entry<?, ?> entry : map.entrySet()) {
+            String key = String.valueOf(entry.getKey());
+
+            if (!immediateKeep.contains(key)) {
+                continue;  // Key not requested — skip
+            }
+
+            jsonWriter.name(key);
+            Object entryValue = entry.getValue();
+
+            Set<String> deeper = deeperSpecs.get(key);
+            if (deeper != null && entryValue != null) {
+                // This key has deeper sub-field specs — recurse
+                writeFilteredNested(jsonWriter, entryValue, deeper,
+                    Object.class, fieldCache);
+            } else if (entryValue == null) {
+                jsonWriter.nullValue();
+            } else {
+                // No deeper filtering — serialize the full value
+                JsonAdapter<Object> valAdapter =
+                    (JsonAdapter<Object>) 
FIELD_FILTER_MOSHI.adapter(Object.class);
+                valAdapter.toJson(jsonWriter, entryValue);
+            }
+        }
+        jsonWriter.endObject();
+    }
+
+    /**
+     * Serialize a POJO with recursive field filtering.
+     *
+     * <p>Mirrors {@link #writeFilteredMap} but operates on POJO fields via
+     * reflection. For each field in {@code immediateKeep}, checks if there
+     * are {@code deeperSpecs} and recurses into nested structures.</p>
      *
-     * <p>This is the inner loop of nested filtering — called once per
-     * array element. For a 500-element collection with 127 fields each,
-     * this method is called 500 times, each time skipping ~125 fields
-     * and serializing ~2.</p>
+     * <p>Used for both standalone nested POJOs and individual elements
+     * within a filtered collection. Uses the request-scoped field cache
+     * to avoid repeated reflection — critical when filtering a 500-element
+     * collection where every element is the same type.</p>
      */
     @SuppressWarnings("unchecked")
-    private void writeFilteredSingleObject(JsonWriter jsonWriter, Object obj,
-                                           Set<String> allowedFields,
-                                           java.util.Map<Class<?>, 
List<Field>> fieldCache)
+    private void writeFilteredPojo(JsonWriter jsonWriter, Object pojo,
+                                   Set<String> immediateKeep,
+                                   java.util.Map<String, Set<String>> 
deeperSpecs,
+                                   java.util.Map<Class<?>, List<Field>> 
fieldCache)
             throws IOException {
 
         List<Field> fields = fieldCache.computeIfAbsent(
-            obj.getClass(), MoshiStreamingMessageFormatter::getAllFields);
+            pojo.getClass(), MoshiStreamingMessageFormatter::getAllFields);
         jsonWriter.beginObject();
         for (Field field : fields) {
-            if (!allowedFields.contains(field.getName())) {
-                continue;  // Skip — this field was not requested
+            if (!immediateKeep.contains(field.getName())) {
+                continue;
             }
             Object value;
             try {
                 field.setAccessible(true);
-                value = field.get(obj);
+                value = field.get(pojo);
             } catch (IllegalAccessException | SecurityException e) {
                 log.warn("Cannot access field "
                     + field.getDeclaringClass().getName().replaceAll("[\r\n]", 
"_")
@@ -524,8 +609,14 @@ public class MoshiStreamingMessageFormatter implements 
MessageFormatter {
                     + " for nested field filtering; skipping", e);
                 continue;
             }
+
             jsonWriter.name(field.getName());
-            if (value == null) {
+            Set<String> deeper = deeperSpecs != null ? 
deeperSpecs.get(field.getName()) : null;
+            if (deeper != null && value != null) {
+                // Recurse into this field's value
+                writeFilteredNested(jsonWriter, value, deeper,
+                    field.getGenericType(), fieldCache);
+            } else if (value == null) {
                 jsonWriter.nullValue();
             } else {
                 JsonAdapter<Object> adapter =
diff --git 
a/modules/json/test/org/apache/axis2/json/streaming/FieldFilteringMessageFormatterTest.java
 
b/modules/json/test/org/apache/axis2/json/streaming/FieldFilteringMessageFormatterTest.java
index aacb835e9e..18a8f6916b 100644
--- 
a/modules/json/test/org/apache/axis2/json/streaming/FieldFilteringMessageFormatterTest.java
+++ 
b/modules/json/test/org/apache/axis2/json/streaming/FieldFilteringMessageFormatterTest.java
@@ -77,7 +77,7 @@ public class FieldFilteringMessageFormatterTest {
 
     @Test
     public void testFilterKeepsSelectedFields() throws Exception {
-        setReturnObject(new PortfolioData("SUCCESS", 0.025, 0.157, 1));
+        setReturnObject(new SampleData("SUCCESS", 0.025, 0.157, 1));
         outMsgContext.setProperty(JsonConstant.FIELD_FILTER,
             setOf("status", "variance"));
 
@@ -94,7 +94,7 @@ public class FieldFilteringMessageFormatterTest {
 
     @Test
     public void testFilterWithAllFieldsMatchesUnfiltered() throws Exception {
-        PortfolioData data = new PortfolioData("SUCCESS", 0.025, 0.157, 1);
+        SampleData data = new SampleData("SUCCESS", 0.025, 0.157, 1);
         setReturnObject(data);
 
         // Unfiltered baseline
@@ -115,7 +115,7 @@ public class FieldFilteringMessageFormatterTest {
 
     @Test
     public void testFilterWithNoMatchingFieldsProducesEmptyResponse() throws 
Exception {
-        setReturnObject(new PortfolioData("SUCCESS", 0.025, 0.157, 1));
+        setReturnObject(new SampleData("SUCCESS", 0.025, 0.157, 1));
         outMsgContext.setProperty(JsonConstant.FIELD_FILTER,
             setOf("nonexistent"));
 
@@ -185,7 +185,7 @@ public class FieldFilteringMessageFormatterTest {
 
     @Test
     public void testNoFilterDelegatesDirectly() throws Exception {
-        setReturnObject(new PortfolioData("SUCCESS", 0.025, 0.157, 1));
+        setReturnObject(new SampleData("SUCCESS", 0.025, 0.157, 1));
         // No FIELD_FILTER set
 
         formatter.writeTo(outMsgContext, outputFormat, outputStream, false);
@@ -363,13 +363,13 @@ public class FieldFilteringMessageFormatterTest {
 
     // ── Test POJOs ────────────────────────────────────────────────────────
 
-    public static class PortfolioData {
+    public static class SampleData {
         public String status;
         public double variance;
         public double volatility;
         public long calcTimeUs;
-        public PortfolioData() {}
-        public PortfolioData(String s, double v, double vol, long t) {
+        public SampleData() {}
+        public SampleData(String s, double v, double vol, long t) {
             status = s; variance = v; volatility = vol; calcTimeUs = t;
         }
     }
@@ -677,7 +677,7 @@ public class FieldFilteringMessageFormatterTest {
     @Test
     public void testNestedDotNotation126of127FieldsRemoved() throws Exception {
         // The headline test: 127 fields per element, keep 1, verify massive
-        // payload reduction. This is the portfolio use case.
+        // payload reduction on wide nested data structures.
         ServiceResponse full = buildNestedResponse(10);
 
         // Full response (all fields)
@@ -741,6 +741,146 @@ public class FieldFilteringMessageFormatterTest {
             first.size() > 100);
     }
 
+    // ── Multi-level dot-notation (Map-of-List-of-Map pattern) ──────────
+    //
+    // Models the real-world pattern where a service returns a response POJO
+    // with a Map<String, Object> field ("data") that contains a
+    // List<Map<String, Object>> ("records") where each map has 127 keys.
+    // Two-level dot-notation: ?fields=status,data.records.s0
+
+    /** Response with a Map<String, Object> field — the Map-based pattern. */
+    public static class MapResponse {
+        public String status;
+        public long calcTimeMs;
+        public java.util.Map<String, Object> data;
+
+        public MapResponse() {}
+        public MapResponse(String status, long calcTimeMs,
+                           java.util.Map<String, Object> data) {
+            this.status = status;
+            this.calcTimeMs = calcTimeMs;
+            this.data = data;
+        }
+    }
+
+    /** Build a MapResponse with N calculation records (127 keys each)
+     *  inside data.records, plus metadata keys in data. */
+    private static MapResponse buildMapResponse(int nRecords) {
+        java.util.Map<String, Object> data = new java.util.LinkedHashMap<>();
+
+        // Records: list of maps, each with 127 keys
+        List<java.util.Map<String, Object>> records = new ArrayList<>();
+        for (int i = 0; i < nRecords; i++) {
+            java.util.Map<String, Object> record = new 
java.util.LinkedHashMap<>();
+            // 30 string fields
+            for (int n = 0; n < 30; n++) record.put("s" + n, "val_" + i + "_" 
+ n);
+            // 40 double fields
+            for (int n = 0; n < 40; n++) record.put("d" + n, n * 1.1 + i);
+            // 25 int fields
+            for (int n = 0; n < 25; n++) record.put("i" + n, n * 100 + i);
+            // 20 long fields
+            for (int n = 0; n < 20; n++) record.put("l" + n, (long)(n * 
1000000 + i));
+            // 12 boolean fields  — total: 30+40+25+20+12 = 127
+            for (int n = 0; n < 12; n++) record.put("b" + n, n % 2 == 0);
+            records.add(record);
+        }
+        data.put("records", records);
+
+        // Metadata keys (would be filtered out in a real query)
+        data.put("notes", Arrays.asList("note1", "note2"));
+        data.put("diagnostics", java.util.Map.of("cacheHit", true, 
"queryTimeMs", 42));
+        data.put("viewState", "some-large-view-state-blob");
+
+        return new MapResponse("SUCCESS", 42, data);
+    }
+
+    @Test
+    public void testTwoLevelDotNotationFiltersMapsRecursively() throws 
Exception {
+        // Two-level: data.records.s0 — keep only s0 in each record
+        setReturnObject(buildMapResponse(3));
+        outMsgContext.setProperty(JsonConstant.FIELD_FILTER,
+            setOf("status", "data.records.s0"));
+
+        formatter.writeTo(outMsgContext, outputFormat, outputStream, false);
+        JsonElement response = parseResponse();
+
+        // Top level: status + data (calcTimeMs filtered)
+        Assert.assertTrue(response.getAsJsonObject().has("status"));
+        Assert.assertTrue(response.getAsJsonObject().has("data"));
+        Assert.assertFalse("calcTimeMs should be filtered",
+            response.getAsJsonObject().has("calcTimeMs"));
+
+        // data: only "records" key (notes, diagnostics, viewState filtered)
+        var data = response.getAsJsonObject().getAsJsonObject("data");
+        Assert.assertEquals("data should have exactly 1 key", 1, data.size());
+        Assert.assertTrue(data.has("records"));
+
+        // Each records element: only s0 (126 of 127 keys filtered)
+        var calcs = data.getAsJsonArray("records");
+        Assert.assertEquals(3, calcs.size());
+        for (int i = 0; i < 3; i++) {
+            var calc = calcs.get(i).getAsJsonObject();
+            Assert.assertEquals("Element " + i + " should have exactly 1 key",
+                1, calc.size());
+            Assert.assertEquals("val_" + i + "_0",
+                calc.get("s0").getAsString());
+        }
+    }
+
+    @Test
+    public void testTwoLevelDotNotationMultipleSubFields() throws Exception {
+        // Keep 5 fields from each of 127 in the records
+        setReturnObject(buildMapResponse(2));
+        outMsgContext.setProperty(JsonConstant.FIELD_FILTER,
+            setOf("status", "data.records.s0", "data.records.d5",
+                  "data.records.i10", "data.records.l15",
+                  "data.records.b0"));
+
+        formatter.writeTo(outMsgContext, outputFormat, outputStream, false);
+        JsonElement response = parseResponse();
+
+        var calcs = response.getAsJsonObject().getAsJsonObject("data")
+            .getAsJsonArray("records");
+        var first = calcs.get(0).getAsJsonObject();
+        Assert.assertEquals("Should have 5 keys per element", 5, first.size());
+        Assert.assertTrue(first.has("s0"));
+        Assert.assertTrue(first.has("d5"));
+        Assert.assertTrue(first.has("i10"));
+        Assert.assertTrue(first.has("l15"));
+        Assert.assertTrue(first.has("b0"));
+    }
+
+    @Test
+    public void testTwoLevelDotNotation97PercentReduction() throws Exception {
+        // The dramatic test: 10 records x 127 fields, filter to 1
+        MapResponse fullResp = buildMapResponse(10);
+
+        // Full response
+        setReturnObject(fullResp);
+        formatter.writeTo(outMsgContext, outputFormat, outputStream, false);
+        int fullSize = outputStream.size();
+
+        // Filtered: keep only data.records.s0
+        outputStream.reset();
+        outMsgContext.setProperty(JsonConstant.FIELD_FILTER,
+            setOf("status", "data.records.s0"));
+        formatter.writeTo(outMsgContext, outputFormat, outputStream, false);
+        int filteredSize = outputStream.size();
+
+        double reductionPct = (1.0 - (double) filteredSize / fullSize) * 100;
+
+        Assert.assertTrue(
+            "Full response (" + fullSize + " bytes) should be > 10KB",
+            fullSize > 10000);
+        Assert.assertTrue(
+            "Filtered response (" + filteredSize + " bytes) should be < 500 
bytes",
+            filteredSize < 500);
+        Assert.assertTrue(
+            "Payload reduction (" + String.format("%.0f", reductionPct)
+                + "%) should exceed 95%",
+            reductionPct > 95.0);
+    }
+
     static class TestHelper {
         static org.apache.axiom.om.OMElement createFaultElement() {
             var factory = OMAbstractFactory.getOMFactory();

Reply via email to