alien11689 commented on a change in pull request #5081:
URL: https://github.com/apache/nifi/pull/5081#discussion_r637705334



##########
File path: 
nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/test/java/org/apache/nifi/processors/hive/TestConvertAvroToORC.java
##########
@@ -320,6 +322,78 @@ public void test_onTrigger_complex_record() throws 
Exception {
         assertEquals(sampleBigDecimal, ((HiveDecimalWritable) 
decimalFieldObject).getHiveDecimal().bigDecimalValue());
     }
 
+    @Test
+    public void test_onTrigger_complex_records_with_bigdecimals() throws 
Exception {
+
+        Map<String, Double> mapData1 = new TreeMap<String, Double>() {{
+            put("key1", 1.0);
+            put("key2", 2.0);
+        }};
+
+        DatumWriter<GenericData.Record> writer;
+        DataFileWriter<GenericData.Record> fileWriter = null;
+        ByteArrayOutputStream out = new ByteArrayOutputStream();
+
+        Random random = new Random();
+        double bigDecimalUpperRange = 200;
+        for (int i = 0; i < 1000; ++i) {
+            ByteBuffer bigDecimalAsBytes = toByteBuffer(new 
BigDecimal(random.nextDouble() * bigDecimalUpperRange).setScale(2, 
RoundingMode.HALF_UP));
+            GenericData.Record record = 
TestNiFiOrcUtils.buildComplexAvroRecord(null, mapData1, "XYZ", 4L, 
Arrays.asList(100, 200), bigDecimalAsBytes);
+            if (i == 0) {
+                writer = new GenericDatumWriter<>(record.getSchema());
+                fileWriter = new DataFileWriter<>(writer);
+                fileWriter.create(record.getSchema(), out);
+            }
+            fileWriter.append(record);
+        }
+
+        fileWriter.flush();
+        fileWriter.close();
+        out.close();
+
+        Map<String, String> attributes = new HashMap<String, String>() {{
+            put(CoreAttributes.FILENAME.key(), "test");
+        }};
+        runner.enqueue(out.toByteArray(), attributes);
+        runner.run();
+
+        runner.assertAllFlowFilesTransferred(ConvertAvroToORC.REL_SUCCESS, 1);
+
+        // Write the flow file out to disk, since the ORC Reader needs a path
+        MockFlowFile resultFlowFile = 
runner.getFlowFilesForRelationship(ConvertAvroToORC.REL_SUCCESS).get(0);
+        assertEquals("1000", 
resultFlowFile.getAttribute(ConvertAvroToORC.RECORD_COUNT_ATTRIBUTE));
+        assertEquals("test.orc", 
resultFlowFile.getAttribute(CoreAttributes.FILENAME.key()));
+        byte[] resultContents = runner.getContentAsByteArray(resultFlowFile);
+        FileOutputStream fos = new FileOutputStream("target/test1.orc");
+        fos.write(resultContents);
+        fos.flush();
+        fos.close();
+
+        Configuration conf = new Configuration();
+        FileSystem fs = FileSystem.getLocal(conf);
+        Reader reader = OrcFile.createReader(new Path("target/test1.orc"), 
OrcFile.readerOptions(conf).filesystem(fs));
+        RecordReader rows = reader.rows();
+        while (rows.hasNext()) {
+            Object o = rows.next(null);
+            assertNotNull(o);
+            assertTrue(o instanceof OrcStruct);
+            TypeInfo resultSchema = TestNiFiOrcUtils.buildComplexOrcSchema();
+            StructObjectInspector inspector = (StructObjectInspector) 
OrcStruct.createObjectInspector(resultSchema);
+
+            assertDecimalInRange(inspector.getStructFieldData(o, 
inspector.getStructFieldRef("myDecimal")), bigDecimalUpperRange);

Review comment:
      I am now constructing the BigDecimal from a string with non-zero digits 
after the decimal point, so the results should always be the same




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]


Reply via email to