Repository: nifi
Updated Branches:
  refs/heads/master bfecf0f06 -> 2e4ddbf3b


NIFI-4893 Cannot convert Avro schemas to Record schemas with default arrays

Signed-off-by: Mark Payne <marka...@hotmail.com>


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/2e4ddbf3
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/2e4ddbf3
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/2e4ddbf3

Branch: refs/heads/master
Commit: 2e4ddbf3b2b27c45c1bf746cbc0358b0b4576a25
Parents: bfecf0f
Author: gardellajuanpablo <gardellajuanpa...@gmail.com>
Authored: Mon Feb 19 16:07:14 2018 -0300
Committer: Mark Payne <marka...@hotmail.com>
Committed: Tue Mar 6 14:09:39 2018 -0500

----------------------------------------------------------------------
 .../nifi-avro-record-utils/pom.xml              |   4 +
 .../java/org/apache/nifi/avro/AvroTypeUtil.java |  36 ++-
 .../org/apache/nifi/avro/TestAvroTypeUtil.java  | 235 ++++++++++++-------
 .../nifi/avro/defaultArrayInRecords1.json       |  27 +++
 .../nifi/avro/defaultArrayInRecords2.json       |  27 +++
 .../apache/nifi/avro/defaultArrayValue1.json    |  16 ++
 .../apache/nifi/avro/defaultArrayValue2.json    |  16 ++
 7 files changed, 264 insertions(+), 97 deletions(-)
----------------------------------------------------------------------
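For context, a minimal sketch of the scenario this commit fixes (illustrative only; the
class name and inline schema below are made up, but mirror the defaultArrayValue2.json
test resource added in this commit). Before this change, converting an Avro schema whose
array field declares a default value failed; afterwards the default is exposed on the
RecordField as an Object[]:

import org.apache.avro.Schema;
import org.apache.nifi.avro.AvroTypeUtil;
import org.apache.nifi.serialization.record.RecordSchema;

public class DefaultArraySketch {
    public static void main(final String[] args) {
        // An array field with a non-null default, as in defaultArrayValue2.json
        final Schema avroSchema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"Foo1\",\"fields\":[{"
                + "\"name\":\"listOfInt\","
                + "\"type\":{\"type\":\"array\",\"items\":\"int\"},"
                + "\"default\":[1,2]}]}");

        // With this commit, the List default reported by Avro is converted to an
        // Object[] before it is stored on the RecordField.
        final RecordSchema recordSchema = AvroTypeUtil.createSchema(avroSchema);
        final Object[] defaultValue =
                (Object[]) recordSchema.getField("listOfInt").get().getDefaultValue();
        System.out.println(defaultValue.length); // prints 2
    }
}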


http://git-wip-us.apache.org/repos/asf/nifi/blob/2e4ddbf3/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/pom.xml b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/pom.xml
index cff5b1a..2f43c5f 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/pom.xml
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/pom.xml
@@ -51,6 +51,10 @@
                     <excludes combine.children="append">
                         <exclude>src/test/resources/org/apache/nifi/avro/data.avro</exclude>
                         <exclude>src/test/resources/org/apache/nifi/avro/schema.json</exclude>
+                        <exclude>src/test/resources/org/apache/nifi/avro/defaultArrayValue1.json</exclude>
+                        <exclude>src/test/resources/org/apache/nifi/avro/defaultArrayValue2.json</exclude>
+                        <exclude>src/test/resources/org/apache/nifi/avro/defaultArrayInRecords1.json</exclude>
+                        <exclude>src/test/resources/org/apache/nifi/avro/defaultArrayInRecords2.json</exclude>
                       
                     </excludes>
                 </configuration>
             </plugin>

http://git-wip-us.apache.org/repos/asf/nifi/blob/2e4ddbf3/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/main/java/org/apache/nifi/avro/AvroTypeUtil.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/main/java/org/apache/nifi/avro/AvroTypeUtil.java b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/main/java/org/apache/nifi/avro/AvroTypeUtil.java
index 6ae35f8..92678c8 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/main/java/org/apache/nifi/avro/AvroTypeUtil.java
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/main/java/org/apache/nifi/avro/AvroTypeUtil.java
@@ -315,12 +315,7 @@ public class AvroTypeUtil {
                         final Schema fieldSchema = field.schema();
                        final DataType fieldType = determineDataType(fieldSchema, knownRecordTypes);
                         final boolean nullable = isNullable(fieldSchema);
-
-                        if (field.defaultVal() == JsonProperties.NULL_VALUE) {
-                            recordFields.add(new RecordField(fieldName, fieldType, field.aliases(), nullable));
-                        } else {
-                            recordFields.add(new RecordField(fieldName, fieldType, field.defaultVal(), field.aliases(), nullable));
-                        }
+                        addFieldToList(recordFields, field, fieldName, fieldSchema, fieldType, nullable);
                     }
 
                     recordSchema.setFields(recordFields);
@@ -396,12 +391,7 @@ public class AvroTypeUtil {
             final Schema fieldSchema = field.schema();
            final DataType dataType = AvroTypeUtil.determineDataType(fieldSchema, knownRecords);
             final boolean nullable = isNullable(fieldSchema);
-
-            if (field.defaultVal() == JsonProperties.NULL_VALUE) {
-            recordFields.add(new RecordField(fieldName, dataType, field.aliases(), nullable));
-        } else {
-            recordFields.add(new RecordField(fieldName, dataType, field.defaultVal(), field.aliases(), nullable));
-        }
+            addFieldToList(recordFields, field, fieldName, fieldSchema, dataType, nullable);
         }
 
         recordSchema.setFields(recordFields);
@@ -505,6 +495,28 @@ public class AvroTypeUtil {
        return convertToAvroObject(rawValue, fieldSchema, fieldSchema.getName());
     }
 
+    /**
+     * Adds a {@link RecordField} for the given Avro field to the <tt>recordFields</tt> list,
+     * converting an array default value into an <tt>Object[]</tt> so that it is compatible
+     * with the Record API.
+     * @param recordFields the list that the new record field is added to
+     * @param field the Avro field
+     * @param fieldName the name of the field
+     * @param fieldSchema the Avro schema of the field
+     * @param dataType the record data type of the field
+     * @param nullable whether the field is nullable
+     */
+    private static void addFieldToList(final List<RecordField> recordFields, final Field field, final String fieldName,
+            final Schema fieldSchema, final DataType dataType, final boolean nullable) {
+        if (field.defaultVal() == JsonProperties.NULL_VALUE) {
+            recordFields.add(new RecordField(fieldName, dataType, field.aliases(), nullable));
+        } else {
+            Object defaultValue = field.defaultVal();
+            if (fieldSchema.getType() == Schema.Type.ARRAY && !DataTypeUtils.isArrayTypeCompatible(defaultValue)) {
+                defaultValue = defaultValue instanceof List ? ((List<?>) defaultValue).toArray() : new Object[0];
+            }
+            recordFields.add(new RecordField(fieldName, dataType, defaultValue, field.aliases(), nullable));
+        }
+    }
+
     @SuppressWarnings("unchecked")
    private static Object convertToAvroObject(final Object rawValue, final Schema fieldSchema, final String fieldName) {
         if (rawValue == null) {
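
The key behavioral change in addFieldToList above is the normalization of array defaults:
Avro's parser reports a JSON array default as a java.util.List, while the Record API
expects an Object[]. A standalone sketch of the same idea (hypothetical helper name, not
part of the commit; the real code uses DataTypeUtils.isArrayTypeCompatible for the
compatibility check):

import java.util.Arrays;
import java.util.List;

public class ArrayDefaultNormalization {
    // Mirrors the conversion added in addFieldToList for Schema.Type.ARRAY fields.
    static Object normalizeArrayDefault(final Object defaultValue) {
        if (defaultValue instanceof Object[]) {
            return defaultValue;                     // already compatible
        }
        return defaultValue instanceof List
                ? ((List<?>) defaultValue).toArray() // e.g. [1, 2] becomes Object[]{1, 2}
                : new Object[0];                     // anything else falls back to an empty array
    }

    public static void main(final String[] args) {
        System.out.println(Arrays.toString((Object[]) normalizeArrayDefault(Arrays.asList(1, 2)))); // [1, 2]
        System.out.println(Arrays.toString((Object[]) normalizeArrayDefault(0)));                   // []
    }
}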

http://git-wip-us.apache.org/repos/asf/nifi/blob/2e4ddbf3/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/java/org/apache/nifi/avro/TestAvroTypeUtil.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/java/org/apache/nifi/avro/TestAvroTypeUtil.java b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/java/org/apache/nifi/avro/TestAvroTypeUtil.java
index d24488e..bf7d255 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/java/org/apache/nifi/avro/TestAvroTypeUtil.java
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/java/org/apache/nifi/avro/TestAvroTypeUtil.java
@@ -17,6 +17,7 @@
 
 package org.apache.nifi.avro;
 
+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -37,8 +38,11 @@ import org.apache.avro.Schema;
 import org.apache.avro.Schema.Field;
 import org.apache.avro.Schema.Type;
 import org.apache.avro.file.DataFileStream;
+import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericDatumReader;
 import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.generic.GenericRecordBuilder;
+import org.apache.avro.generic.GenericData.Record;
 import org.apache.nifi.schema.access.SchemaNotFoundException;
 import org.apache.nifi.serialization.SimpleRecordSchema;
 import org.apache.nifi.serialization.record.DataType;
@@ -56,7 +60,8 @@ public class TestAvroTypeUtil {
         final List<RecordField> fields = new ArrayList<>();
         fields.add(new RecordField("int", RecordFieldType.INT.getDataType()));
         fields.add(new RecordField("long", 
RecordFieldType.LONG.getDataType()));
-        fields.add(new RecordField("string", 
RecordFieldType.STRING.getDataType(), "hola", 
Collections.singleton("greeting")));
+        fields.add(new RecordField("string", 
RecordFieldType.STRING.getDataType(), "hola",
+                Collections.singleton("greeting")));
         fields.add(new RecordField("byte", 
RecordFieldType.BYTE.getDataType()));
         fields.add(new RecordField("char", 
RecordFieldType.CHAR.getDataType()));
         fields.add(new RecordField("short", 
RecordFieldType.SHORT.getDataType()));
@@ -72,7 +77,6 @@ public class TestAvroTypeUtil {
        final DataType mapType = RecordFieldType.MAP.getMapDataType(RecordFieldType.LONG.getDataType());
         fields.add(new RecordField("map", mapType));
 
-
         final List<RecordField> personFields = new ArrayList<>();
         personFields.add(new RecordField("name", 
RecordFieldType.STRING.getDataType()));
         personFields.add(new RecordField("dob", 
RecordFieldType.DATE.getDataType()));
@@ -80,7 +84,6 @@ public class TestAvroTypeUtil {
        final DataType personType = RecordFieldType.RECORD.getRecordDataType(personSchema);
         fields.add(new RecordField("person", personType));
 
-
         final RecordSchema recordSchema = new SimpleRecordSchema(fields);
 
         final Schema avroSchema = AvroTypeUtil.extractAvroSchema(recordSchema);
@@ -89,7 +92,8 @@ public class TestAvroTypeUtil {
         for (final Field field : avroSchema.getFields()) {
             final Schema fieldSchema = field.schema();
             assertEquals(Type.UNION, fieldSchema.getType());
-            assertTrue("Field " + field.name() + " does not contain NULL 
type", fieldSchema.getTypes().contains(Schema.create(Type.NULL)));
+            assertTrue("Field " + field.name() + " does not contain NULL type",
+                    fieldSchema.getTypes().contains(Schema.create(Type.NULL)));
         }
 
        final RecordSchema afterConversion = AvroTypeUtil.createSchema(avroSchema);
@@ -114,33 +118,119 @@ public class TestAvroTypeUtil {
         assertEquals(Collections.singleton("greeting"), 
stringField.getAliases());
     }
 
+    /**
+     * The issue was that an Avro schema with a default value for an array field
+     * could not be converted to a Record schema. See
+     * <a href="https://issues.apache.org/jira/browse/NIFI-4893">NIFI-4893</a>.
+     * @throws IOException
+     *             if the schema resource cannot be read.
+     */
     @Test
-    // Simple recursion is a record A composing itself (similar to a LinkedList Node referencing 'next')
+    public void testDefaultArrayValue1() throws IOException {
+        Schema avroSchema = new Schema.Parser().parse(getClass().getResourceAsStream("defaultArrayValue1.json"));
+        GenericRecordBuilder builder = new GenericRecordBuilder(avroSchema);
+        Record r = builder.build();
+        @SuppressWarnings("unchecked")
+        GenericData.Array<Integer> values = (GenericData.Array<Integer>) r.get("listOfInt");
+        assertEquals(0, values.size());
+        RecordSchema record = AvroTypeUtil.createSchema(avroSchema);
+        RecordField field = record.getField("listOfInt").get();
+        assertEquals(RecordFieldType.ARRAY, field.getDataType().getFieldType());
+        assertTrue(field.getDefaultValue() instanceof Object[]);
+        assertEquals(0, ((Object[]) field.getDefaultValue()).length);
+    }
+
+    /**
+     * The issue was that an Avro schema with a default value for an array field
+     * could not be converted to a Record schema. See
+     * <a href="https://issues.apache.org/jira/browse/NIFI-4893">NIFI-4893</a>.
+     * @throws IOException
+     *             if the schema resource cannot be read.
+     */
+    @Test
+    public void testDefaultArrayValue2() throws IOException {
+        Schema avroSchema = new Schema.Parser().parse(getClass().getResourceAsStream("defaultArrayValue2.json"));
+        GenericRecordBuilder builder = new GenericRecordBuilder(avroSchema);
+        Record r = builder.build();
+        @SuppressWarnings("unchecked")
+        GenericData.Array<Integer> values = (GenericData.Array<Integer>) r.get("listOfInt");
+        assertArrayEquals(new Object[] { 1, 2 }, values.toArray());
+        RecordSchema record = AvroTypeUtil.createSchema(avroSchema);
+        RecordField field = record.getField("listOfInt").get();
+        assertEquals(RecordFieldType.ARRAY, field.getDataType().getFieldType());
+        assertTrue(field.getDefaultValue() instanceof Object[]);
+        assertArrayEquals(new Object[] { 1, 2 }, ((Object[]) field.getDefaultValue()));
+    }
+
+    /**
+     * The issue was that an Avro schema with a default value for an array field
+     * could not be converted to a Record schema. See
+     * <a href="https://issues.apache.org/jira/browse/NIFI-4893">NIFI-4893</a>.
+     * @throws IOException
+     *             if the schema resource cannot be read.
+     */
+    @Test
+    public void testDefaultArrayValuesInRecordsCase1() throws IOException {
+        Schema avroSchema = new Schema.Parser().parse(getClass().getResourceAsStream("defaultArrayInRecords1.json"));
+        GenericRecordBuilder builder = new GenericRecordBuilder(avroSchema);
+        Record field1Record = new GenericRecordBuilder(avroSchema.getField("field1").schema()).build();
+        builder.set("field1", field1Record);
+        Record r = builder.build();
+
+        @SuppressWarnings("unchecked")
+        GenericData.Array<Integer> values = (GenericData.Array<Integer>) ((GenericRecord) r.get("field1"))
+                .get("listOfInt");
+        assertArrayEquals(new Object[] {}, values.toArray());
+        RecordSchema record = AvroTypeUtil.createSchema(avroSchema);
+        RecordField field = record.getField("field1").get();
+        assertEquals(RecordFieldType.RECORD, field.getDataType().getFieldType());
+        RecordDataType data = (RecordDataType) field.getDataType();
+        RecordSchema childSchema = data.getChildSchema();
+        RecordField childField = childSchema.getField("listOfInt").get();
+        assertEquals(RecordFieldType.ARRAY, childField.getDataType().getFieldType());
+        assertTrue(childField.getDefaultValue() instanceof Object[]);
+        assertArrayEquals(new Object[] {}, ((Object[]) childField.getDefaultValue()));
+    }
+
+    /**
+     * The issue was that an Avro schema with a default value for an array field
+     * could not be converted to a Record schema. See
+     * <a href="https://issues.apache.org/jira/browse/NIFI-4893">NIFI-4893</a>.
+     * @throws IOException
+     *             if the schema resource cannot be read.
+     */
+    @Test
+    public void testDefaultArrayValuesInRecordsCase2() throws IOException {
+        Schema avroSchema = new Schema.Parser().parse(getClass().getResourceAsStream("defaultArrayInRecords2.json"));
+        GenericRecordBuilder builder = new GenericRecordBuilder(avroSchema);
+        Record field1Record = new GenericRecordBuilder(avroSchema.getField("field1").schema()).build();
+        builder.set("field1", field1Record);
+        Record r = builder.build();
+
+        @SuppressWarnings("unchecked")
+        GenericData.Array<Integer> values = (GenericData.Array<Integer>) ((GenericRecord) r.get("field1"))
+                .get("listOfInt");
+        assertArrayEquals(new Object[] { 1, 2, 3 }, values.toArray());
+        RecordSchema record = AvroTypeUtil.createSchema(avroSchema);
+        RecordField field = record.getField("field1").get();
+        assertEquals(RecordFieldType.RECORD, field.getDataType().getFieldType());
+        RecordDataType data = (RecordDataType) field.getDataType();
+        RecordSchema childSchema = data.getChildSchema();
+        RecordField childField = childSchema.getField("listOfInt").get();
+        assertEquals(RecordFieldType.ARRAY, childField.getDataType().getFieldType());
+        assertTrue(childField.getDefaultValue() instanceof Object[]);
+        assertArrayEquals(new Object[] { 1, 2, 3 }, ((Object[]) childField.getDefaultValue()));
+    }
+
+    @Test
+    // Simple recursion is a record A composing itself (similar to a LinkedList Node
+    // referencing 'next')
     public void testSimpleRecursiveSchema() {
-        Schema recursiveSchema = new Schema.Parser().parse(
-                "{\n" +
-                "  \"namespace\": \"org.apache.nifi.testing\",\n" +
-                "  \"name\": \"NodeRecord\",\n" +
-                "  \"type\": \"record\",\n" +
-                "  \"fields\": [\n" +
-                "    {\n" +
-                "      \"name\": \"id\",\n" +
-                "      \"type\": \"int\"\n" +
-                "    },\n" +
-                "    {\n" +
-                "      \"name\": \"value\",\n" +
-                "      \"type\": \"string\"\n" +
-                "    },\n" +
-                "    {\n" +
-                "      \"name\": \"parent\",\n" +
-                "      \"type\": [\n" +
-                "        \"null\",\n" +
-                "        \"NodeRecord\"\n" +
-                "      ]\n" +
-                "    }\n" +
-                "  ]\n" +
-                "}\n"
-        );
+        Schema recursiveSchema = new Schema.Parser().parse("{\n" + "  \"namespace\": \"org.apache.nifi.testing\",\n"
+                + "  \"name\": \"NodeRecord\",\n" + "  \"type\": \"record\",\n" + "  \"fields\": [\n" + "    {\n"
+                + "      \"name\": \"id\",\n" + "      \"type\": \"int\"\n" + "    },\n" + "    {\n"
+                + "      \"name\": \"value\",\n" + "      \"type\": \"string\"\n" + "    },\n" + "    {\n"
+                + "      \"name\": \"parent\",\n" + "      \"type\": [\n" + "        \"null\",\n"
+                + "        \"NodeRecord\"\n" + "      ]\n" + "    }\n" + "  ]\n" + "}\n");
 
         // Make sure the following doesn't throw an exception
         RecordSchema result = AvroTypeUtil.createSchema(recursiveSchema);
@@ -160,55 +250,27 @@ public class TestAvroTypeUtil {
         Assert.assertTrue(parentField.isPresent());
        Assert.assertEquals(RecordFieldType.RECORD, parentField.get().getDataType().getFieldType());
 
-        // The 'parent' field should have a circular schema reference to the top level record schema, similar to how Avro handles this
-        Assert.assertEquals(result, ((RecordDataType)parentField.get().getDataType()).getChildSchema());
+        // The 'parent' field should have a circular schema reference to the top level
+        // record schema, similar to how Avro handles this
+        Assert.assertEquals(result, ((RecordDataType) parentField.get().getDataType()).getChildSchema());
     }
 
     @Test
-    // Complicated recursion is a record A composing record B, who composes a record A
+    // Complicated recursion is a record A composing record B, who composes a record
+    // A
     public void testComplicatedRecursiveSchema() {
-        Schema recursiveSchema = new Schema.Parser().parse(
-                "{\n" +
-                "  \"namespace\": \"org.apache.nifi.testing\",\n" +
-                "  \"name\": \"Record_A\",\n" +
-                "  \"type\": \"record\",\n" +
-                "  \"fields\": [\n" +
-                "    {\n" +
-                "      \"name\": \"id\",\n" +
-                "      \"type\": \"int\"\n" +
-                "    },\n" +
-                "    {\n" +
-                "      \"name\": \"value\",\n" +
-                "      \"type\": \"string\"\n" +
-                "    },\n" +
-                "    {\n" +
-                "      \"name\": \"child\",\n" +
-                "      \"type\": {\n" +
-                "        \"namespace\": \"org.apache.nifi.testing\",\n" +
-                "        \"name\": \"Record_B\",\n" +
-                "        \"type\": \"record\",\n" +
-                "        \"fields\": [\n" +
-                "          {\n" +
-                "            \"name\": \"id\",\n" +
-                "            \"type\": \"int\"\n" +
-                "          },\n" +
-                "          {\n" +
-                "            \"name\": \"value\",\n" +
-                "            \"type\": \"string\"\n" +
-                "          },\n" +
-                "          {\n" +
-                "            \"name\": \"parent\",\n" +
-                "            \"type\": [\n" +
-                "              \"null\",\n" +
-                "              \"Record_A\"\n" +
-                "            ]\n" +
-                "          }\n" +
-                "        ]\n" +
-                "      }\n" +
-                "    }\n" +
-                "  ]\n" +
-                "}\n"
-        );
+        Schema recursiveSchema = new Schema.Parser().parse("{\n" + "  \"namespace\": \"org.apache.nifi.testing\",\n"
+                + "  \"name\": \"Record_A\",\n" + "  \"type\": \"record\",\n" + "  \"fields\": [\n" + "    {\n"
+                + "      \"name\": \"id\",\n" + "      \"type\": \"int\"\n" + "    },\n" + "    {\n"
+                + "      \"name\": \"value\",\n" + "      \"type\": \"string\"\n" + "    },\n" + "    {\n"
+                + "      \"name\": \"child\",\n" + "      \"type\": {\n"
+                + "        \"namespace\": \"org.apache.nifi.testing\",\n" + "        \"name\": \"Record_B\",\n"
+                + "        \"type\": \"record\",\n" + "        \"fields\": [\n" + "          {\n"
+                + "            \"name\": \"id\",\n" + "            \"type\": \"int\"\n" + "          },\n"
+                + "          {\n" + "            \"name\": \"value\",\n" + "            \"type\": \"string\"\n"
+                + "          },\n" + "          {\n" + "            \"name\": \"parent\",\n"
+                + "            \"type\": [\n" + "              \"null\",\n" + "              \"Record_A\"\n"
+                + "            ]\n" + "          }\n" + "        ]\n" + "      }\n" + "    }\n" + "  ]\n" + "}\n");
 
         // Make sure the following doesn't throw an exception
        RecordSchema recordASchema = AvroTypeUtil.createSchema(recursiveSchema);
@@ -229,7 +291,7 @@ public class TestAvroTypeUtil {
        Assert.assertEquals(RecordFieldType.RECORD, recordAChildField.get().getDataType().getFieldType());
 
         // Get the child schema
-        RecordSchema recordBSchema = ((RecordDataType)recordAChildField.get().getDataType()).getChildSchema();
+        RecordSchema recordBSchema = ((RecordDataType) recordAChildField.get().getDataType()).getChildSchema();
 
         // Make sure it parsed correctly
         Assert.assertEquals(3, recordBSchema.getFieldCount());
@@ -246,8 +308,9 @@ public class TestAvroTypeUtil {
         Assert.assertTrue(recordBParentField.isPresent());
        Assert.assertEquals(RecordFieldType.RECORD, recordBParentField.get().getDataType().getFieldType());
 
-        // Make sure the 'parent' field has a schema reference back to the original top level record schema
-        Assert.assertEquals(recordASchema, ((RecordDataType)recordBParentField.get().getDataType()).getChildSchema());
+        // Make sure the 'parent' field has a schema reference back to the original top
+        // level record schema
+        Assert.assertEquals(recordASchema, ((RecordDataType) recordBParentField.get().getDataType()).getChildSchema());
     }
 
     @Test
@@ -259,9 +322,9 @@ public class TestAvroTypeUtil {
        RecordSchema recordASchema = AvroTypeUtil.createSchema(recursiveSchema.getTypes().get(0));
 
         // check the fix with the proper file
-        try(DataFileStream<GenericRecord> r = new DataFileStream<>(getClass().getResourceAsStream("data.avro"),
+        try (DataFileStream<GenericRecord> r = new DataFileStream<>(getClass().getResourceAsStream("data.avro"),
                 new GenericDatumReader<>())) {
-            GenericRecord n= r.next();
+            GenericRecord n = r.next();
             AvroTypeUtil.convertAvroRecordToMap(n, recordASchema);
         }
     }
@@ -276,14 +339,14 @@ public class TestAvroTypeUtil {
 
         // Double to Decimal
         expects.put(123d, "123.00000000");
-        // Double can not represent exact 1234567890.12345678, so use 1 less digit to test here.
+        // Double can not represent exact 1234567890.12345678, so use 1 less digit to
+        // test here.
         expects.put(1234567890.12345678d, "1234567890.12345670");
         expects.put(123456789.12345678d, "123456789.12345678");
         expects.put(1234567890123456d, "1234567890123456.00000000");
         // ROUND HALF UP.
         expects.put(0.1234567890123456d, "0.12345679");
 
-
         // BigDecimal to BigDecimal
         expects.put(new BigDecimal("123"), "123.00000000");
         expects.put(new BigDecimal("1234567890.12345678"), 
"1234567890.12345678");
@@ -291,7 +354,6 @@ public class TestAvroTypeUtil {
         // ROUND HALF UP.
         expects.put(new BigDecimal("0.123456789012345678"), "0.12345679");
 
-
         // String to BigDecimal
         expects.put("123", "123.00000000");
         expects.put("1234567890.12345678", "1234567890.12345678");
@@ -316,15 +378,18 @@ public class TestAvroTypeUtil {
                     // Expected behavior.
                     return;
                 }
-                fail(String.format("Unexpected exception, %s with %s %s while expecting %s", e, rawValue.getClass().getSimpleName(), rawValue, expect));
+                fail(String.format("Unexpected exception, %s with %s %s while expecting %s", e,
+                        rawValue.getClass().getSimpleName(), rawValue, expect));
                 return;
             }
 
             assertTrue(convertedValue instanceof ByteBuffer);
             final ByteBuffer serializedBytes = (ByteBuffer) convertedValue;
 
-            final BigDecimal bigDecimal = new Conversions.DecimalConversion().fromBytes(serializedBytes, fieldSchema, decimalType);
-            assertEquals(String.format("%s %s should be converted to %s", rawValue.getClass().getSimpleName(), rawValue, expect), expect, bigDecimal.toString());
+            final BigDecimal bigDecimal = new Conversions.DecimalConversion().fromBytes(serializedBytes, fieldSchema,
+                    decimalType);
+            assertEquals(String.format("%s %s should be converted to %s", rawValue.getClass().getSimpleName(), rawValue,
+                    expect), expect, bigDecimal.toString());
         });
 
     }

http://git-wip-us.apache.org/repos/asf/nifi/blob/2e4ddbf3/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayInRecords1.json
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayInRecords1.json b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayInRecords1.json
new file mode 100644
index 0000000..5344aa4
--- /dev/null
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayInRecords1.json
@@ -0,0 +1,27 @@
+{
+   "type":"record",
+   "name":"Foo",
+   "fields":[
+      {
+         "name":"field1",         
+         "type":
+            {
+               "type":"record",
+               "name":"BigDecimal",
+               "fields":[                 
+                  {
+                     "name":"listOfInt",
+                     "type":{
+                        "type":"array",
+                        "items":"int"
+                     },
+                     "doc":"array of ints",
+                     "default": 0
+                  }
+               ],
+               "logicalType":"BigDecimal"
+            }
+         
+      }
+   ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/nifi/blob/2e4ddbf3/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayInRecords2.json
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayInRecords2.json b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayInRecords2.json
new file mode 100644
index 0000000..67b2821
--- /dev/null
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayInRecords2.json
@@ -0,0 +1,27 @@
+{
+   "type":"record",
+   "name":"Foo",
+   "fields":[
+      {
+         "name":"field1",         
+         "type":
+            {
+               "type":"record",
+               "name":"BigDecimal",
+               "fields":[                 
+                  {
+                     "name":"listOfInt",
+                     "type":{
+                        "type":"array",
+                        "items":"int"
+                     },
+                     "doc":"array of ints",
+                     "default": [1,2,3]
+                  }
+               ],
+               "logicalType":"BigDecimal"
+            }
+         
+      }
+   ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/nifi/blob/2e4ddbf3/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayValue1.json
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayValue1.json b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayValue1.json
new file mode 100644
index 0000000..3e50f54
--- /dev/null
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayValue1.json
@@ -0,0 +1,16 @@
+{
+       "type": "record",
+       "name": "Foo1",
+       "namespace": "foo.namespace",
+       "fields": [
+               {
+                       "name": "listOfInt",
+                       "type": {
+                               "type": "array",
+                               "items": "int"
+                       },
+                       "doc": "array of ints",
+                       "default": 0
+               }
+       ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/nifi/blob/2e4ddbf3/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayValue2.json
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayValue2.json b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayValue2.json
new file mode 100644
index 0000000..8a15b1e
--- /dev/null
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-avro-record-utils/src/test/resources/org/apache/nifi/avro/defaultArrayValue2.json
@@ -0,0 +1,16 @@
+{
+       "type": "record",
+       "name": "Foo1",
+       "namespace": "foo.namespace",
+       "fields": [
+               {
+                       "name": "listOfInt",
+                       "type": {
+                               "type": "array",
+                               "items": "int"
+                       },
+                       "doc": "array of ints",
+                       "default": [1,2]
+               }
+       ]
+}
\ No newline at end of file
