[
https://issues.apache.org/jira/browse/APEXMALHAR-2011?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15197049#comment-15197049
]
ASF GitHub Bot commented on APEXMALHAR-2011:
--------------------------------------------
Github user chinmaykolhatkar commented on a diff in the pull request:
https://github.com/apache/incubator-apex-malhar/pull/211#discussion_r56299388
--- Diff: contrib/src/test/java/com/datatorrent/contrib/avro/AvroReaderWriterTest.java ---
@@ -0,0 +1,219 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.datatorrent.contrib.avro;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.ListIterator;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+
+import junit.framework.Assert;
+
+@SuppressWarnings("deprecation")
+public class AvroReaderWriterTest {
+
+ private AvroToPojo reader = null;
+ private PojoToAvro writer = null;
+
+ public static final String schemaFile = "{\"namespace\":\"abc\",\"type\":\"record\",\"doc\":\"Order schema\",\"name\":\"Order\",\"fields\":[{\"name\":\"orderId\",\"type\": \"long\"},{\"name\":\"customerId\",\"type\": \"int\"},{\"name\":\"total\",\"type\": \"double\"},{\"name\":\"customerName\",\"type\": \"string\"}]}";
+
+ public static final String fieldInfoInitMap = "orderId:orderId:LONG,customerId:customerId:INTEGER,customerName:customerName:STRING,total:total:DOUBLE";
+
+ private Class<?> className = SimpleOrder.class;
+
+ private List<GenericRecord> recordList = null;
+
+ public AvroReaderWriterTest() {
+
+ }
+
+ @Before
+ public void initializeReaderAndWriter() {
+ reader = new AvroToPojo();
+ writer = new PojoToAvro();
+
+ recordList = new LinkedList<GenericRecord>();
+
+ initializeWriter();
+ initializeReader();
+
+ }
+
+ public void initializeWriter() {
+
+ getWriter().setSchemaString(schemaFile);
+
+ try {
+ getWriter().parseSchema();
+ } catch (IOException e) {
+ LOG.error("Error in parsing");
+ }
+
+ getWriter().setCls(className);
+
+ getWriter().initializeColumnMap(writer.getSchema());
+
+ }
+
+ public void createReaderInput() {
+ int cnt = 3;
+
+ while (cnt > 0) {
+
+ GenericRecord rec = new GenericData.Record(reader.getSchema());
+ rec.put("orderId", cnt * 1);
+ rec.put("customerId", cnt * 2);
+ rec.put("total", cnt * 1.5);
+ rec.put("customerName", "*" + cnt + "*");
+ cnt--;
+ recordList.add(rec);
+
+ }
+ }
+
+ public void initializeReader() {
+ reader.setSchemaString(schemaFile);
+
+ try {
+ getReader().parseSchema();
+ } catch (IOException e) {
+ LOG.error("Exception in parsing schema," +
e.getMessage());
+ }
+
+ getReader().setCls(className);
+
+ /**
+ * To set the field mapping we use a comma-separated list of
+ * fieldInPojo:FieldInGenericRecord:DataType. This can be set as a part
+ * of the operator initialization phase via the fieldInfoInitMap string.
+ * If this is not given, reflection is used only once to generate this map.
+ */
+
+ if (fieldInfoInitMap.isEmpty() || getReader().getGenericRecordToPOJOFieldsMapping() == null) {
+ getReader().setFieldInfos(getReader().createFieldInfoMap(getReader().generateFieldInfoInputs(SimpleOrder.class)));
+ } else {
+ getReader().setFieldInfos(getReader().createFieldInfoMap(fieldInfoInitMap));
+ }
+
+ getReader().initColumnFieldSetters(getReader().getFieldInfos());
+
+ createReaderInput();
+
+ }
+
+ public AvroToPojo getReader() {
+ return reader;
+ }
+
+ public void setReader(AvroToPojo reader) {
+ this.reader = reader;
+ }
+
+ public PojoToAvro getWriter() {
+ return writer;
+ }
+
+ public void setWriter(PojoToAvro writer) {
+ this.writer = writer;
+ }
+
+ @Test
+ public void testWriter() {
--- End diff --
You need to make the operator go through the proper lifecycle in unit tests:
op.setup()
op.beginWindow(x)
op.port.put(<tuple>)
op.endWindow();
op.teardown()
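A minimal sketch of what such a lifecycle-driven test could look like is given below. The port names (data/output), the use of com.datatorrent.lib.testbench.CollectorTestSink, the null OperatorContext and the no-arg SimpleOrder constructor are assumptions made for illustration, not details taken from this pull request:

// assumes: import com.datatorrent.lib.testbench.CollectorTestSink;
@Test
public void testWriterLifecycle()
{
  CollectorTestSink<Object> sink = new CollectorTestSink<Object>();
  writer.output.setSink(sink);            // assumed output port name
  writer.setup(null);                     // assuming setup() tolerates a null context in tests
  writer.beginWindow(0);
  SimpleOrder order = new SimpleOrder();  // assumed no-arg constructor; populate fields as needed
  writer.data.put(order);                 // assumed input port name
  writer.endWindow();
  writer.teardown();
  Assert.assertEquals("records emitted", 1, sink.collectedTuples.size());
}

Running the tuple through setup/beginWindow/put/endWindow/teardown exercises the same code paths the platform would, instead of calling the operator's helper methods directly.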
> POJO to Avro record converter
> -----------------------------
>
> Key: APEXMALHAR-2011
> URL: https://issues.apache.org/jira/browse/APEXMALHAR-2011
> Project: Apache Apex Malhar
> Issue Type: New Feature
> Reporter: devendra tagare
>
> We are looking to develop a record converter which would take a POJO as an
> input and emit a Generic record as the output based on the given Avro schema.
> The expected inputs for this operator would be:
> 1. Class name of the incoming POJO
> 2. Avro schema for the Generic Record to emit
> This operator would receive an Object on its input port and emit a Generic
> record on the output port.
> To start with, we would handle primitive types and then go on to handle
> complex types.
> Thanks,
> Dev
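As a rough illustration of the interface described above, a minimal configuration sketch for the writer side might look as follows; the setter names mirror those used in the test diff earlier in this thread, while the example POJO class and schema constant are only assumptions:

// Sketch: configure the POJO-to-Avro converter with the two inputs named in the issue.
PojoToAvro pojoToAvro = new PojoToAvro();
pojoToAvro.setCls(SimpleOrder.class);                        // 1. class of the incoming POJO
pojoToAvro.setSchemaString(AvroReaderWriterTest.schemaFile); // 2. Avro schema for the GenericRecord to emit
pojoToAvro.parseSchema();                                    // may throw IOException for a malformed schema
// At runtime the operator receives a POJO on its input port and emits a
// GenericRecord built from the mapped fields on its output port.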
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)