GitHub user mattyb149 commented on a diff in the pull request:
https://github.com/apache/nifi/pull/254#discussion_r71980960
--- Diff:
nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONtoCSV.java
---
@@ -0,0 +1,360 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.standard;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.components.AllowableValue;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.processor.AbstractProcessor;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.io.StreamCallback;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+@Tags({"JSON, CSV, convert"})
+@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
+@WritesAttribute(attribute="mime.type", description="Sets the mime type to
application/csv")
+@CapabilityDescription("Converts a JSON document to CSV. This processor
reads the entire content " +
+ "of incoming FlowFiles into memory in order to perform the
conversion. The processor will parse JSON Arrays, JSON Objects " +
+ "and the combination of the two regardless of the level of nesting
in the JSON document.")
+public class ConvertJSONtoCSV extends AbstractProcessor {
+ volatile String delimiter;
+ volatile String removeFields;
+ volatile String emptyFields = "";
+
+ public static final AllowableValue INCLUDE_HEADER_TRUE = new
AllowableValue(
+ "True", "True", "Creates headers for each JSON file.");
+ public static final AllowableValue INCLUDE_HEADER_FALSE = new
AllowableValue(
+ "False", "False", "Only parses the JSON fields and does not
include headers");
+
+
+ public static final PropertyDescriptor DELIMITER = new
PropertyDescriptor
+ .Builder().name("CSV Delimiter")
+ .description("Delimiter used for the generated CSV output
(Example: , | -)")
+ .required(true)
+ .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+ .build();
+
+ public static final PropertyDescriptor REMOVE_FIELDS = new
PropertyDescriptor
+ .Builder().name("Remove JSON Fields/Columns")
+ .description("Comma delimited list of columns that should be
removed when parsing JSON and building the CSV. " +
+ "This includes all top level and most granular nested
fields/columns. By default with nothing specified every field " +
+ "will be parsed.")
+ .required(false)
+ .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+ .build();
+
+ public static final PropertyDescriptor EMPTY_FIELDS = new
PropertyDescriptor
+ .Builder().name("Empty field value")
+ .description("During denormalization/flattening of the JSON
the value that will be substituted for empty fields values " +
+ "(Example: NULL). Defaults to empty string if not
specified.")
+ .required(false)
+ .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+ .build();
+
+ public static final PropertyDescriptor INCLUDE_HEADERS = new
PropertyDescriptor
+ .Builder().name("Include Headers")
+ .description("Whether or not to include headers in the CSV
output")
+ .required(true)
+ .allowableValues(INCLUDE_HEADER_TRUE, INCLUDE_HEADER_FALSE)
+ .defaultValue(INCLUDE_HEADER_TRUE.getValue())
+ .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+ .build();
+
+ public static final Relationship RELATIONSHIP_SUCCESS = new
Relationship.Builder()
+ .name("success")
+ .description("Successfully parsing the JSON file to CSV ")
+ .build();
+
+ public static final Relationship RELATIONSHIP_FAILURE = new
Relationship.Builder()
+ .name("failure")
+ .description("Failed parsing the JSON file to CSV ")
+ .build();
+
--- End diff --
Should there be an "original" relationship that the incoming JSON file would be
transferred to upon success? Other processors that perform similar conversions
provide one.
---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---