[GitHub] nifi issue #559: NIFI-2075: Addressing issues with Provenance Page

2016-06-21 Thread scottyaslan
Github user scottyaslan commented on the issue:

https://github.com/apache/nifi/pull/559
  
looks good




[GitHub] nifi pull request #557: [NIFI-1879] Update containment for status history di...

2016-06-21 Thread asfgit
Github user asfgit closed the pull request at:

https://github.com/apache/nifi/pull/557




[GitHub] nifi issue #554: NIFI-2032 port for 0.x

2016-06-21 Thread alopresto
Github user alopresto commented on the issue:

https://github.com/apache/nifi/pull/554
  
I'm going to rebase, squash, and re-push. 




[GitHub] nifi pull request #556: NIFI-615 - Create a processor to extract WAV file ch...

2016-06-21 Thread JPercivall
Github user JPercivall commented on a diff in the pull request:

https://github.com/apache/nifi/pull/556#discussion_r67982895
  
--- Diff: nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java ---
@@ -0,0 +1,311 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.media;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.regex.Pattern;
+
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
+import org.apache.nifi.annotation.behavior.SupportsBatching;
+import org.apache.nifi.annotation.behavior.WritesAttribute;
+import org.apache.nifi.annotation.behavior.WritesAttributes;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.components.ValidationContext;
+import org.apache.nifi.components.ValidationResult;
+import org.apache.nifi.components.Validator;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.logging.ProcessorLog;
+import org.apache.nifi.processor.AbstractProcessor;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.io.InputStreamCallback;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.util.ObjectHolder;
+
+import org.apache.tika.exception.TikaException;
+import org.apache.tika.io.TikaInputStream;
+import org.apache.tika.metadata.Metadata;
+import org.apache.tika.parser.AutoDetectParser;
+import org.apache.tika.sax.BodyContentHandler;
+import org.xml.sax.SAXException;
+
+@InputRequirement(Requirement.INPUT_REQUIRED)
+@Tags({"media", "file", "format", "metadata", "audio", "video", "image", 
"document", "pdf"})
+@CapabilityDescription("Extract the content metadata from flowfiles 
containing audio, video, image, and other file "
++ "types.  This processor relies on the Apache Tika project for 
file format detection and parsing.  It "
++ "extracts a long list of metadata types for media files 
including audio, video, and print media "
++ "formats."
++ "For the more details and the list of supported file types, 
visit the library's website "
++ "at http://tika.apache.org/.;)
+@WritesAttributes({@WritesAttribute(attribute = ".", description = "The extracted content metadata "
++ "will be inserted with the attribute name \".\", or \"\" if "
++ "\"Metadata Key Prefix\" is not provided.")})
+@SupportsBatching
+public class ExtractMediaMetadata extends AbstractProcessor {
+
+static final PropertyDescriptor MAX_NUMBER_OF_ATTRIBUTES = new 
PropertyDescriptor.Builder()
+.name("Max Number of Attributes")
+.description("Specify the max number of attributes to add to 
the flowfile. There is no guarantee in what order"
++ " the tags will be processed. By default it will 
process all of them.")
+.required(false)
+
.addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
+.build();
+
+private static final PropertyDescriptor MAX_ATTRIBUTE_LENGTH = new 
PropertyDescriptor.Builder()
+.name("Max 

[GitHub] nifi issue #554: NIFI-2032 port for 0.x

2016-06-21 Thread alopresto
Github user alopresto commented on the issue:

https://github.com/apache/nifi/pull/554
  
I fixed an issue in `GetSNMPTest` (a local variable was referenced from 
within an inner class, and wasn't declared `final`). I forgot to squash it with 
the commit that closed the PR. It was commit 
[`aeb07124d13e09a18a2f3085269f614a5d1ef710`](https://github.com/apache/nifi/commit/aeb07124d13e09a18a2f3085269f614a5d1ef710).
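
For context, a minimal self-contained sketch of the Java 7 rule being described (hypothetical code, not the actual `GetSNMPTest`): a local variable captured by an anonymous inner class must be declared `final` or the class does not compile.

```java
// Hypothetical illustration only -- not the GetSNMPTest source.
// On Java 7, a local variable captured by an anonymous inner class must be declared final.
public class CapturedLocalExample {

    interface Callback {
        void call();
    }

    public static void main(String[] args) {
        final String community = "public";   // removing 'final' breaks the capture below on Java 7
        Callback callback = new Callback() {
            @Override
            public void call() {
                System.out.println("community = " + community);
            }
        };
        callback.call();
    }
}
```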
 




[GitHub] nifi pull request #534: Fix for NIFI-1838 & NIFI-1152

2016-06-21 Thread mattyb149
Github user mattyb149 commented on a diff in the pull request:

https://github.com/apache/nifi/pull/534#discussion_r67970007
  
--- Diff: nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/InvokeScriptedProcessor.java ---
@@ -92,11 +92,10 @@
 logger.error(message, t);
 }
 }
-} else {
-// Return defaults for now
-relationships.add(REL_SUCCESS);
-relationships.add(REL_FAILURE);
 }
+// Add defaults
+relationships.add(REL_SUCCESS);
+relationships.add(REL_FAILURE);
--- End diff --

Yeah, if this is failing tests, I'd say take a look at the tests. We don't want to add default 
relationships, especially when there's a legit instance of a Processor, as it is the Processor's 
responsibility to define all relationships.



[GitHub] nifi pull request #556: NIFI-615 - Create a processor to extract WAV file ch...

2016-06-21 Thread JPercivall
Github user JPercivall commented on a diff in the pull request:

https://github.com/apache/nifi/pull/556#discussion_r67969885
  
--- Diff: nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/test/java/org/apache/nifi/processors/media/TestExtractMediaMetadata.java ---
@@ -0,0 +1,450 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.media;
+
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.util.MockFlowFile;
+import org.apache.nifi.util.TestRunner;
+import org.apache.nifi.util.TestRunners;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class TestExtractMediaMetadata {
+
+    @Test
+    public void testProperties() {
+        final TestRunner runner = TestRunners.newTestRunner(new ExtractMediaMetadata());
+        ProcessContext context = runner.getProcessContext();
+        Map<PropertyDescriptor, String> propertyValues = context.getProperties();
+        assertEquals(6, propertyValues.size());
+    }
+
+    @Test
+    public void testRelationships() {
+        final TestRunner runner = TestRunners.newTestRunner(new ExtractMediaMetadata());
+        ProcessContext context = runner.getProcessContext();
+        Set<Relationship> relationships = context.getAvailableRelationships();
+        assertEquals(2, relationships.size());
+        assertTrue(relationships.contains(ExtractMediaMetadata.SUCCESS));
+        assertTrue(relationships.contains(ExtractMediaMetadata.FAILURE));
+    }
+
+    @Test
+    public void testTextBytes() throws IOException {
+        final TestRunner runner = TestRunners.newTestRunner(new ExtractMediaMetadata());
+        runner.setProperty(ExtractMediaMetadata.MIME_TYPE_FILTER, "text/.*");
+        runner.setProperty(ExtractMediaMetadata.METADATA_KEY_FILTER, "");
+        runner.setProperty(ExtractMediaMetadata.METADATA_KEY_PREFIX, "txt.");
+        runner.assertValid();
+
+        final Map<String, String> attrs = new HashMap<>();
+        attrs.put("filename", "test1.txt");
+        runner.enqueue("test1".getBytes(), attrs);
+        runner.run();
+
+        runner.assertAllFlowFilesTransferred(ExtractMediaMetadata.SUCCESS, 1);
+        runner.assertTransferCount(ExtractMediaMetadata.FAILURE, 0);
+
+        final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(ExtractMediaMetadata.SUCCESS);
+        MockFlowFile flowFile0 = successFiles.get(0);
+        flowFile0.assertAttributeExists("filename");
+        flowFile0.assertAttributeEquals("filename", "test1.txt");
+        flowFile0.assertAttributeExists("txt.Content-Type");
+        assertTrue(flowFile0.getAttribute("txt.Content-Type").startsWith("text/plain"));
+        flowFile0.assertAttributeExists("txt.X-Parsed-By");
+        assertTrue(flowFile0.getAttribute("txt.X-Parsed-By").contains("org.apache.tika.parser.DefaultParser"));
+        assertTrue(flowFile0.getAttribute("txt.X-Parsed-By").contains("org.apache.tika.parser.txt.TXTParser"));
+        flowFile0.assertAttributeExists("txt.Content-Encoding");
+        flowFile0.assertAttributeEquals("txt.Content-Encoding", "ISO-8859-1");
+        flowFile0.assertContentEquals("test1".getBytes("UTF-8"));
+    }
+
+    @Test
+    public void testNoFlowFile() throws IOException {
+        final TestRunner runner = TestRunners.newTestRunner(new ExtractMediaMetadata());
+        runner.setProperty(ExtractMediaMetadata.MIME_TYPE_FILTER, "text/.*");
+

[GitHub] nifi issue #554: NIFI-2032 port for 0.x

2016-06-21 Thread alopresto
Github user alopresto commented on the issue:

https://github.com/apache/nifi/pull/554
  
I am getting test failures:

```
Results :

Failed tests:
  KafkaPublisherTest.validateSuccessfulSendAsDelimited:106 expected:<3> but was:<-1>
  KafkaPublisherTest.validateSuccessfulSendAsWhole:79 expected:<0> but was:<-1>
  KafkaPublisherTest.validateWithNonDefaultPartitioner:250 null

Tests in error:
  KafkaPublisherTest.validateRetries:146 » ConsumerTimeout
  KafkaPublisherTest.validateWithMultiByteCharactersNoDelimiter:231 » ConsumerTimeout
```

Do these run successfully for you locally? I'm going to try re-running 
without parallel execution. 




[GitHub] nifi pull request #556: NIFI-615 - Create a processor to extract WAV file ch...

2016-06-21 Thread joewitt
Github user joewitt commented on a diff in the pull request:

https://github.com/apache/nifi/pull/556#discussion_r67966608
  
--- Diff: nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java ---

[GitHub] nifi pull request #556: NIFI-615 - Create a processor to extract WAV file ch...

2016-06-21 Thread jskora
Github user jskora commented on a diff in the pull request:

https://github.com/apache/nifi/pull/556#discussion_r67965881
  
--- Diff: nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java ---

[GitHub] nifi pull request #556: NIFI-615 - Create a processor to extract WAV file ch...

2016-06-21 Thread jskora
Github user jskora commented on a diff in the pull request:

https://github.com/apache/nifi/pull/556#discussion_r67965763
  
--- Diff: nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java ---

[GitHub] nifi pull request #556: NIFI-615 - Create a processor to extract WAV file ch...

2016-06-21 Thread jskora
Github user jskora commented on a diff in the pull request:

https://github.com/apache/nifi/pull/556#discussion_r67965642
  
--- Diff: nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java ---

[GitHub] nifi pull request #556: NIFI-615 - Create a processor to extract WAV file ch...

2016-06-21 Thread JPercivall
Github user JPercivall commented on a diff in the pull request:

https://github.com/apache/nifi/pull/556#discussion_r67964066
  
--- Diff: nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java ---
@@ -0,0 +1,311 @@
    [...]
+@WritesAttributes({@WritesAttribute(attribute = "<Metadata Key Prefix>.<attribute name>", description = "The extracted content metadata "
--- End diff --

Here it has "." making it seem like there 
is automatically a "." added but in the property descriptor it says the "." or 
"-" is not automatically added. I agree that we shouldn't lock the user into 
using ".", "-", etc. so this should be changed to reflect that.




[GitHub] nifi issue #556: NIFI-615 - Create a processor to extract WAV file character...

2016-06-21 Thread jskora
Github user jskora commented on the issue:

https://github.com/apache/nifi/pull/556
  
I'm not sure why, but only the main code commit, 
236266c9e4ed89b4c78438f36408b5f6e0b0c488, applies to 1.0/master without 
problems.  The other pom commits do not.

It is easy enough to fix them.  The pom.xml files in nifi-media-bundle/* 
(previously nifi-image-bundle) need "nifi-image-*" changed to "nifi-media-*", 
except for "nifi-image-viewer" references, which do not change.  The version 
number also needs to be bumped from 0.7.0-SNAPSHOT to 1.0.0-SNAPSHOT.

After that, it built and tested as expected, including the content buffer 
size limit throwing the exception until the buffer size is increased.




[GitHub] nifi pull request #556: NIFI-615 - Create a processor to extract WAV file ch...

2016-06-21 Thread JPercivall
Github user JPercivall commented on a diff in the pull request:

https://github.com/apache/nifi/pull/556#discussion_r67963557
  
--- Diff: nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java ---

[GitHub] nifi pull request #556: NIFI-615 - Create a processor to extract WAV file ch...

2016-06-21 Thread JPercivall
Github user JPercivall commented on a diff in the pull request:

https://github.com/apache/nifi/pull/556#discussion_r67963320
  
--- Diff: nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java ---

[GitHub] nifi pull request #556: NIFI-615 - Create a processor to extract WAV file ch...

2016-06-21 Thread JPercivall
Github user JPercivall commented on a diff in the pull request:

https://github.com/apache/nifi/pull/556#discussion_r67963044
  
--- Diff: nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/src/main/java/org/apache/nifi/processors/media/ExtractMediaMetadata.java ---

[GitHub] nifi pull request #379: NIFI-1022 Added Tachyon/Alluxio processors

2016-06-21 Thread apiri
Github user apiri commented on a diff in the pull request:

https://github.com/apache/nifi/pull/379#discussion_r67958810
  
--- Diff: nifi-nar-bundles/nifi-alluxio-bundle/nifi-alluxio-processors/src/main/java/org/apache/nifi/processors/alluxio/GetAlluxio.java ---
@@ -0,0 +1,184 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.alluxio;
+
+import alluxio.AlluxioURI;
+import alluxio.client.ReadType;
+import alluxio.client.file.FileInStream;
+import alluxio.client.file.URIStatus;
+import alluxio.client.file.options.OpenFileOptions;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.nifi.annotation.behavior.EventDriven;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.io.OutputStreamCallback;
+import org.apache.nifi.util.StopWatch;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+@Tags({"alluxio", "tachyon", "get", "file"})
+@EventDriven
+@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED)
+@CapabilityDescription("This processor will access the file using the 
input URI provided and write the content of "
++ "the remote file to the content of the incoming FlowFile.")
+public class GetAlluxio extends AbstractAlluxioProcessor {
+
+public static final PropertyDescriptor READ_TYPE = new 
PropertyDescriptor.Builder()
+.name("alluxio-read-type")
+.displayName("Read type")
+.description("The Read Type to use when accessing the remote 
file")
+.defaultValue(ReadType.CACHE_PROMOTE.toString())
+.required(true)
+.allowableValues(ReadType.values())
+.build();
+
+private final static List propertyDescriptors;
+
+// Relationships
+public static final Relationship REL_SUCCESS = new 
Relationship.Builder()
+.name("success")
+.description("All files successfully retrieved are routed to 
this relationship")
+.build();
+public static final Relationship REL_FAILURE = new 
Relationship.Builder()
+.name("failure")
+.description("In case of failure, flow files will be routed to 
this relationship")
+.autoTerminateDefault(true)
+.build();
+public static final Relationship REL_SUCCESS_REQ = new 
Relationship.Builder()
+.name("original")
+.description("In case of success, the original FlowFile will 
be routed to this relationship")
+.autoTerminateDefault(true)
+.build();
+
+private final static Set relationships;
+
+
+/*
+ * Will ensure that the list of property descriptors is build only 
once.
+ * Will also create a Set of relationships
+ */
+static {
+List _propertyDescriptors = new ArrayList<>();
+_propertyDescriptors.addAll(descriptors);
+_propertyDescriptors.add(READ_TYPE);
+propertyDescriptors = 
Collections.unmodifiableList(_propertyDescriptors);
+
+Set _relationships = new HashSet<>();
+_relationships.add(REL_SUCCESS);
+_relationships.add(REL_FAILURE);
+

[GitHub] nifi pull request #379: NIFI-1022 Added Tachyon/Alluxio processors

2016-06-21 Thread apiri
Github user apiri commented on a diff in the pull request:

https://github.com/apache/nifi/pull/379#discussion_r67958464
  
--- Diff: nifi-nar-bundles/nifi-alluxio-bundle/nifi-alluxio-processors/src/main/java/org/apache/nifi/processors/alluxio/AbstractAlluxioProcessor.java ---
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.alluxio;
+
+import alluxio.client.file.FileSystem;
+import alluxio.client.file.URIStatus;
+
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.processor.AbstractProcessor;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.util.StandardValidators;
+
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * AbstractAlluxioProcessor is a base class for Alluxio processors and 
contains logic and variables common to most
+ * processors integrating with Alluxio.
+ */
+public abstract class AbstractAlluxioProcessor extends AbstractProcessor {
+
+public static final PropertyDescriptor MASTER_HOSTNAME = new 
PropertyDescriptor.Builder()
+.name("alluxio-master-ip")
+.displayName("Master hostname")
+.description("Hostname of the Alluxio File System Master 
node.")
+.required(true)
+.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+.build();
+
+public static final PropertyDescriptor MASTER_PORT = new 
PropertyDescriptor.Builder()
+.name("alluxio-master-port")
+.displayName("Master port")
+.description("Port to use when connecting to the Alluxio File 
System Master node.")
+.required(true)
+.defaultValue("19998")
+.addValidator(StandardValidators.PORT_VALIDATOR)
+.build();
+
+public static final PropertyDescriptor URI = new 
PropertyDescriptor.Builder()
+.name("alluxio-uri")
+.displayName("URI")
+.description("Alluxio URI to use. Example: /path")
--- End diff --

Oh, good to know and that is super helpful.  Apologies for overlooking it 
as I sometimes neglect JIRA while in GitHub land.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi issue #379: NIFI-1022 Added Tachyon/Alluxio processors

2016-06-21 Thread pvillard31
Github user pvillard31 commented on the issue:

https://github.com/apache/nifi/pull/379
  
Thanks for the review @apiri!
I still need to take some of your comments into account, but I rebased the 
PR against master. While doing some additional testing, for a reason I can't 
explain yet, I was unable to use the ListAlluxio processor:


2016-06-21 23:28:56,127 WARN [Timer-Driven Process Thread-9] 
o.a.n.c.t.ContinuallyRunProcessorTask Administratively Yielding 
ListAlluxio[id=42835c14-3896-47e0-95ae-ce8266e7c030] due to uncaught Exception: 
java.lang.NullPointerException: Name is null
2016-06-21 23:28:56,130 WARN [Timer-Driven Process Thread-9] 
o.a.n.c.t.ContinuallyRunProcessorTask 
java.lang.NullPointerException: Name is null
at java.lang.Enum.valueOf(Enum.java:236) ~[na:1.8.0_77]
at 
alluxio.exception.AlluxioExceptionType.valueOf(AlluxioExceptionType.java:19) 
~[na:na]
at alluxio.exception.AlluxioException.from(AlluxioException.java:69) 
~[na:na]
at alluxio.AbstractClient.retryRPC(AbstractClient.java:324) ~[na:na]
at 
alluxio.client.file.FileSystemMasterClient.listStatus(FileSystemMasterClient.java:271)
 ~[na:na]
at 
alluxio.client.file.BaseFileSystem.listStatus(BaseFileSystem.java:188) ~[na:na]
at 
alluxio.client.file.BaseFileSystem.listStatus(BaseFileSystem.java:179) ~[na:na]
at 
org.apache.nifi.processors.alluxio.ListAlluxio.onTrigger(ListAlluxio.java:91) 
~[na:na]
at 
org.apache.nifi.processor.AbstractProcessor.onTrigger(AbstractProcessor.java:27)
 ~[nifi-api-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at 
org.apache.nifi.controller.StandardProcessorNode.onTrigger(StandardProcessorNode.java:1077)
 ~[nifi-framework-core-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at 
org.apache.nifi.controller.tasks.ContinuallyRunProcessorTask.call(ContinuallyRunProcessorTask.java:136)
 [nifi-framework-core-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at 
org.apache.nifi.controller.tasks.ContinuallyRunProcessorTask.call(ContinuallyRunProcessorTask.java:47)
 [nifi-framework-core-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at 
org.apache.nifi.controller.scheduling.TimerDrivenSchedulingAgent$1.run(TimerDrivenSchedulingAgent.java:123)
 [nifi-framework-core-1.0.0-SNAPSHOT.jar:1.0.0-SNAPSHOT]
at 
java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) 
[na:1.8.0_77]
at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308) 
[na:1.8.0_77]
at 
java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
 [na:1.8.0_77]
at 
java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
 [na:1.8.0_77]
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) 
[na:1.8.0_77]
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) 
[na:1.8.0_77]
at java.lang.Thread.run(Thread.java:745) [na:1.8.0_77]


I'll try to dig more into this tomorrow. Otherwise, a guy from Alluxio 
reached out to offer his help on this PR if needed; I guess I can ask him to 
have a look and leave some comments.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi pull request #548: NIFI-2045 - Removing mqtt message from queue after a...

2016-06-21 Thread JPercivall
Github user JPercivall commented on a diff in the pull request:

https://github.com/apache/nifi/pull/548#discussion_r67957184
  
--- Diff: 
nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
 ---
@@ -288,9 +289,13 @@ public void process(final OutputStream out) throws 
IOException {
 
 String transitUri = new 
StringBuilder(broker).append(mqttMessage.getTopic()).toString();
 session.getProvenanceReporter().receive(messageFlowfile, 
transitUri);
+String uuid = 
messageFlowfile.getAttribute(CoreAttributes.UUID.key());
 session.transfer(messageFlowfile, REL_MESSAGE);
-mqttQueue.remove(mqttMessage);
 session.commit();
+if (!mqttQueue.remove(mqttMessage)) {
--- End diff --

In order to avoid concatenating the Strings in the logger message 
needlessly, this should check if warn is enabled too: 
`... && getLogger().isWarnEnabled()`
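
For illustration, a minimal sketch of the guarded-logging pattern being 
suggested, assuming the processor's getLogger() and the mqttQueue / 
messageFlowfile variables from the diff above (the exact warning text is made 
up):

    if (!mqttQueue.remove(mqttMessage) && getLogger().isWarnEnabled()) {
        // Build the message only when a warning will actually be emitted,
        // and keep the UUID lookup inside the logging branch.
        getLogger().warn("FlowFile " + messageFlowfile.getAttribute(CoreAttributes.UUID.key())
                + " was transferred but its MQTT message was no longer in the queue; "
                + "this may indicate a duplicate.");
    }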


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi pull request #548: NIFI-2045 - Removing mqtt message from queue after a...

2016-06-21 Thread JPercivall
Github user JPercivall commented on a diff in the pull request:

https://github.com/apache/nifi/pull/548#discussion_r67957067
  
--- Diff: 
nifi-nar-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/main/java/org/apache/nifi/processors/mqtt/ConsumeMQTT.java
 ---
@@ -288,9 +289,13 @@ public void process(final OutputStream out) throws 
IOException {
 
 String transitUri = new 
StringBuilder(broker).append(mqttMessage.getTopic()).toString();
 session.getProvenanceReporter().receive(messageFlowfile, 
transitUri);
+String uuid = 
messageFlowfile.getAttribute(CoreAttributes.UUID.key());
--- End diff --

A bit of a nitpick: this is only referenced in the logger statement below, 
so it should be created there rather than outside of it.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi pull request #379: NIFI-1022 Added Tachyon/Alluxio processors

2016-06-21 Thread pvillard31
Github user pvillard31 commented on a diff in the pull request:

https://github.com/apache/nifi/pull/379#discussion_r67956841
  
--- Diff: 
nifi-nar-bundles/nifi-alluxio-bundle/nifi-alluxio-processors/src/main/java/org/apache/nifi/processors/alluxio/AbstractAlluxioProcessor.java
 ---
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors.alluxio;
+
+import alluxio.client.file.FileSystem;
+import alluxio.client.file.URIStatus;
+
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.processor.AbstractProcessor;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.util.StandardValidators;
+
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * AbstractAlluxioProcessor is a base class for Alluxio processors and 
contains logic and variables common to most
+ * processors integrating with Alluxio.
+ */
+public abstract class AbstractAlluxioProcessor extends AbstractProcessor {
+
+public static final PropertyDescriptor MASTER_HOSTNAME = new 
PropertyDescriptor.Builder()
+.name("alluxio-master-ip")
+.displayName("Master hostname")
+.description("Hostname of the Alluxio File System Master 
node.")
+.required(true)
+.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+.build();
+
+public static final PropertyDescriptor MASTER_PORT = new 
PropertyDescriptor.Builder()
+.name("alluxio-master-port")
+.displayName("Master port")
+.description("Port to use when connecting to the Alluxio File 
System Master node.")
+.required(true)
+.defaultValue("19998")
+.addValidator(StandardValidators.PORT_VALIDATOR)
+.build();
+
+public static final PropertyDescriptor URI = new 
PropertyDescriptor.Builder()
+.name("alluxio-uri")
+.displayName("URI")
+.description("Alluxio URI to use. Example: /path")
--- End diff --

I assumed the user would use expression language to define the file name 
with Get/Put. Something like /${filename} for Put, and /${alluxio_name} for 
Get. But if this is unclear, I certainly need to update descriptions. FYI, in 
the JIRA, I added one of the templates I used when testing the processors.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi pull request #523: [NIFI-1879] Responsive dialogs and dialog UX refresh

2016-06-21 Thread asfgit
Github user asfgit closed the pull request at:

https://github.com/apache/nifi/pull/523


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi issue #555: NIFI-2067 ignored intermittently failing MemoryTest

2016-06-21 Thread JPercivall
Github user JPercivall commented on the issue:

https://github.com/apache/nifi/pull/555
  
Nope, I am missing it, sorry about that. Once the Travis build finishes 
successfully I am +1.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi issue #553: NIFI-2063 - Install Script Relative Path Mismatch from Init...

2016-06-21 Thread YolandaMDavis
Github user YolandaMDavis commented on the issue:

https://github.com/apache/nifi/pull/553
  
@jvwing thanks, James. Makes sense on all fronts. I'm working on an 
update now.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi issue #555: NIFI-2067 ignored intermittently failing MemoryTest

2016-06-21 Thread JPercivall
Github user JPercivall commented on the issue:

https://github.com/apache/nifi/pull/555
  
In cases where validateWarnWhenPercentThresholdReached fails, would 
validateWarnWhenSizeThresholdReached fail too? Wondering if we should ignore 
both.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi pull request #555: NIFI-2067 ignored intermittently failing MemoryTest

2016-06-21 Thread olegz
GitHub user olegz opened a pull request:

https://github.com/apache/nifi/pull/555

NIFI-2067 ignored intermittently failing MemoryTest

Left a comment with the explanation as to why it was not removed.

You can merge this pull request into a Git repository by running:

$ git pull https://github.com/olegz/nifi NIFI-2067

Alternatively you can review and apply these changes as the patch at:

https://github.com/apache/nifi/pull/555.patch

To close this pull request, make a commit to your master/trunk branch
with (at least) the following in the commit message:

This closes #555


commit 62e62f96113df3681431289a17a7cf9e9dbd6cd1
Author: Oleg Zhurakousky 
Date:   2016-06-21T18:54:21Z

NIFI-2067 ignored intermittently failing MemoryTest
left comment with the explanation as to why it was not removed




---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi issue #523: [NIFI-1879] Responsive dialogs and dialog UX refresh

2016-06-21 Thread mcgilman
Github user mcgilman commented on the issue:

https://github.com/apache/nifi/pull/523
  
@scottyaslan Looks great!

The additions made since the initial PR have really improved the 
performance of various parts. I've merged this to master.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi pull request #552: NIFI-2066 dynamic port allocation in SNMP processors...

2016-06-21 Thread asfgit
Github user asfgit closed the pull request at:

https://github.com/apache/nifi/pull/552


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


Re: putTCP nifi 0.7?

2016-06-21 Thread Ryan Ward
Hey Joe,

Thanks for the update and taking care of this

Ryan

On Tue, Jun 21, 2016 at 12:22 PM, Joe Percivall <
joeperciv...@yahoo.com.invalid> wrote:

> Hello,
>
>
> As a follow-up, the PutTCP processor was just merged into the master and
> 0.x branches and will be in the 0.7.0 release.
>  Joe
>
> - - - - - -
> Joseph Percivall
> linkedin.com/in/Percivall
> e: joeperciv...@yahoo.com
>
>
>
>
> On Thursday, June 16, 2016 4:57 PM, Joe Percivall
>  wrote:
> Hello,
>
> It is not currently marked as something that will go in 0.7.0 but I will
> review it to see where it stands. If it is able to be completed shortly
> (the contributor addresses my feedback) then it will make it in but if not
> then unfortunately it would slide to a later release.
>
>
> This ticket falls into a category of unversioned "patch available" tickets
> (tickets that don't have a fix version and have a status "patch available")
> that need to be addressed. As the Release Manager for 0.7.0 I will take a
> look at the unversioned "patch available" tickets to see which of them can
> be finished off for 0.7.0 and ping the appropriate committers to help out.
>
> Joe
> - - - - - - Joseph Percivall
> linkedin.com/in/Percivall
> e: joeperciv...@yahoo.com
>
>
>
>
>
> On Thursday, June 16, 2016 2:00 PM, Ryan Ward 
> wrote:
> Will this one make 0.7?
>
> https://issues.apache.org/jira/browse/NIFI-1834
>


[GitHub] nifi issue #552: NIFI-2066 dynamic port allocation in SNMP processors unit t...

2016-06-21 Thread olegz
Github user olegz commented on the issue:

https://github.com/apache/nifi/pull/552
  
+1, merging


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi issue #553: NIFI-2063 - Install Script Relative Path Mismatch from Init...

2016-06-21 Thread YolandaMDavis
Github user YolandaMDavis commented on the issue:

https://github.com/apache/nifi/pull/553
  

Some screenshots of testing the update on a Vagrant VM



![nifi_install_start](https://cloud.githubusercontent.com/assets/1371858/16241243/d26e9d74-37ba-11e6-88d5-705b0d9767bc.jpg)

![nifi_manual_start](https://cloud.githubusercontent.com/assets/1371858/16241244/d2774686-37ba-11e6-9e8e-d6e7a8a8ea0a.jpg)

![nifi_reboot_vagrant](https://cloud.githubusercontent.com/assets/1371858/16241242/d26e5cba-37ba-11e6-98e4-2d64ace43420.jpg)



---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi pull request #554: NIFI-2032 port for 0.x

2016-06-21 Thread olegz
GitHub user olegz opened a pull request:

https://github.com/apache/nifi/pull/554

NIFI-2032 port for 0.x



You can merge this pull request into a Git repository by running:

$ git pull https://github.com/olegz/nifi NIFI-2032-0.x

Alternatively you can review and apply these changes as the patch at:

https://github.com/apache/nifi/pull/554.patch

To close this pull request, make a commit to your master/trunk branch
with (at least) the following in the commit message:

This closes #554


commit 4996856f1df1ed8a18850c3faca964e827b60cbc
Author: Oleg Zhurakousky 
Date:   2016-06-17T16:08:26Z

NIFI-2032 port for 0.x




---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi pull request #398: NIFI-1751 Added proxy authentication in InvokeHttp p...

2016-06-21 Thread asfgit
Github user asfgit closed the pull request at:

https://github.com/apache/nifi/pull/398


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi pull request #553: NIFI-2063 - correct service installation problems wi...

2016-06-21 Thread YolandaMDavis
GitHub user YolandaMDavis opened a pull request:

https://github.com/apache/nifi/pull/553

NIFI-2063 - correct service installation problems with nifi-env.sh

Made changes to ensure proper execution of nifi.sh install:

1) added cp command of nifi_env.sh to /etc/init.d so it can be executed by 
/etc/init.d/nifi
2) Ensured the NIFI_HOME is properly corrected in /etc/init.d/nifi_env.sh

Test of service install, manual start and stop, was done on vagrant vm with 
centos7.0. Reboot startup was checked using the chkconfig command (**chkconfig 
nifi on**) and then restarting the vm.  With the service installation 
correction the issue noted on reboot was not observed so further testing may be 
required.

You can merge this pull request into a Git repository by running:

$ git pull https://github.com/YolandaMDavis/nifi NIFI-2063

Alternatively you can review and apply these changes as the patch at:

https://github.com/apache/nifi/pull/553.patch

To close this pull request, make a commit to your master/trunk branch
with (at least) the following in the commit message:

This closes #553


commit 124693c966f466a8d9783209eafb025450e4a5a2
Author: Yolanda M. Davis 
Date:   2016-06-21T16:23:10Z

NIFI-2063 - correct service installation problems with nifi-env.sh




---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi issue #398: NIFI-1751 Added proxy authentication in InvokeHttp processo...

2016-06-21 Thread apiri
Github user apiri commented on the issue:

https://github.com/apache/nifi/pull/398
  
@pvillard31 Sounds fair.  Will get this incorporated.  Thanks for the 
contribution.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


Re: ConvertJSONToAvro processor is slow?

2016-06-21 Thread Joe Witt
Couple of things to notice just from the screenshot...

As Andy points out the store in kite dataset processor appears to be
doing very little.  The preceding two processors are

  ConvertJSONtoAvro at 270 messages per second.  The resulting data is
far smaller than the input which is interesting.
  ReplaceText is processing at 70 messages per second

StoreInKite is doing very very little.  And I doubt back pressure is a
factor in this config as-is.

Neither of those processors appear to be actively executing threads
when this snapshot was taken.  That makes sense since neither has much
of a queue.

ListenSyslog has 4 active threads.  You likely won't need that many.

I would avoid adding 100 parallel threads to any processor.

But as for speed of the convert and replace they seem to be going as
fast as needed to receive the data...



On Tue, Jun 21, 2016 at 1:18 PM, Andy LoPresto  wrote:
> It looks like the StoreInKiteDataset processor is the issue. It has read
> 1.85 MB in, but written 0 B out. What are the back pressure settings on that
> processor?
>
>
> Andy LoPresto
> alopre...@apache.org
> alopresto.apa...@gmail.com
> PGP Fingerprint: 70EC B3E5 98A6 5A3F D3C4  BACE 3C6E F65B 2F7D EF69
>
> On Jun 21, 2016, at 9:07 AM, pradeepbill  wrote:
>
> Hi there, I am seeing huge delays in the ConvertJSONToAvro processor, where the
> converted data trickles down to the next step. Please see the attachment. I have
> increased the concurrent tasks to 100. Is there anything else I can
> configure/tune up?
>
> nifi_Capture.PNG
> 
>
> Thanks
> Pradeep
>
>
>
> --
> View this message in context:
> http://apache-nifi-developer-list.39713.n7.nabble.com/ConvertJSONToAvro-processor-is-slow-tp11957.html
> Sent from the Apache NiFi Developer List mailing list archive at Nabble.com.
>
>


Re: ConvertJSONToAvro processor is slow?

2016-06-21 Thread Andy LoPresto
Yeah, I noticed that as well after sending the message. This could still be 
because of back pressure around the Kite processor (if the connection feeding 
the Kite processor has back pressure thresholds configured, then once the 
queued object count or data size exceeds those thresholds, the previous 
processor stops being scheduled until the queue drains back below them).

Are there any errors in the logs/nifi-app.log file?

Andy LoPresto
alopre...@apache.org
alopresto.apa...@gmail.com
PGP Fingerprint: 70EC B3E5 98A6 5A3F D3C4  BACE 3C6E F65B 2F7D EF69

> On Jun 21, 2016, at 9:13 AM, pradeepbill  wrote:
> 
> Thanks Andy, but I see ConvertJSONToAvro read 680 MB and wrote 2.86 MB to
> disk, is that not right?
> 
> 
> 
> --
> View this message in context: 
> http://apache-nifi-developer-list.39713.n7.nabble.com/ConvertJSONToAvro-processor-is-slow-tp11957p11960.html
> Sent from the Apache NiFi Developer List mailing list archive at Nabble.com.



signature.asc
Description: Message signed with OpenPGP using GPGMail


[GitHub] nifi issue #551: Removed duplicate dependency declaration from nifi-standard...

2016-06-21 Thread alopresto
Github user alopresto commented on the issue:

https://github.com/apache/nifi/pull/551
  
Thanks @pvillard31 . 


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi issue #551: Removed duplicate dependency declaration from nifi-standard...

2016-06-21 Thread pvillard31
Github user pvillard31 commented on the issue:

https://github.com/apache/nifi/pull/551
  
Merged in master, thanks!


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi pull request #551: Removed duplicate dependency declaration from nifi-s...

2016-06-21 Thread asfgit
Github user asfgit closed the pull request at:

https://github.com/apache/nifi/pull/551


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


Re: ConvertJSONToAvro processor is slow?

2016-06-21 Thread pradeepbill
Thanks Andy, but I see ConvertJSONToAvro read 680 MB and wrote 2.86 MB to
disk, is that not right?



--
View this message in context: 
http://apache-nifi-developer-list.39713.n7.nabble.com/ConvertJSONToAvro-processor-is-slow-tp11957p11960.html
Sent from the Apache NiFi Developer List mailing list archive at Nabble.com.


Re: ConvertJSONToAvro processor is slow?

2016-06-21 Thread Andy LoPresto
It looks like the StoreInKiteDataset processor is the issue. It has read 1.85 
MB in, but written 0 B out. What are the back pressure settings on that 
processor?


Andy LoPresto
alopre...@apache.org
alopresto.apa...@gmail.com
PGP Fingerprint: 70EC B3E5 98A6 5A3F D3C4  BACE 3C6E F65B 2F7D EF69

> On Jun 21, 2016, at 9:07 AM, pradeepbill  wrote:
> 
> Hi there, I am seeing huge delays in the ConvertJSONToAvro processor, where the
> converted data trickles down to the next step. Please see the attachment. I have
> increased the concurrent tasks to 100. Is there anything else I can
> configure/tune up?
> 
> nifi_Capture.PNG
> 
> 
> Thanks
> Pradeep
> 
> 
> 
> --
> View this message in context: 
> http://apache-nifi-developer-list.39713.n7.nabble.com/ConvertJSONToAvro-processor-is-slow-tp11957.html
> Sent from the Apache NiFi Developer List mailing list archive at Nabble.com.



signature.asc
Description: Message signed with OpenPGP using GPGMail


[GitHub] nifi pull request #552: NIFI-2066 dynamic port allocation in SNMP processors...

2016-06-21 Thread pvillard31
GitHub user pvillard31 opened a pull request:

https://github.com/apache/nifi/pull/552

NIFI-2066 dynamic port allocation in SNMP processors unit tests



You can merge this pull request into a Git repository by running:

$ git pull https://github.com/pvillard31/nifi NIFI-2066

Alternatively you can review and apply these changes as the patch at:

https://github.com/apache/nifi/pull/552.patch

To close this pull request, make a commit to your master/trunk branch
with (at least) the following in the commit message:

This closes #552


commit efbbbcfa935a49705940b27d96f6b8711d9e4b60
Author: Pierre Villard 
Date:   2016-06-21T17:16:43Z

NIFI-2066 dynamic port allocation in SNMP processors unit tests




---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi pull request #551: Removed duplicate dependency declaration from nifi-s...

2016-06-21 Thread alopresto
GitHub user alopresto opened a pull request:

https://github.com/apache/nifi/pull/551

Removed duplicate dependency declaration from nifi-standard-processor…

…s/pom.xml.

Simple PR for duplicated dependency declaration (probably a copy/paste 
error). Did not open Jira for this. 

You can merge this pull request into a Git repository by running:

$ git pull https://github.com/alopresto/nifi duplicate-dependency

Alternatively you can review and apply these changes as the patch at:

https://github.com/apache/nifi/pull/551.patch

To close this pull request, make a commit to your master/trunk branch
with (at least) the following in the commit message:

This closes #551


commit dab888b1c8f62658ddcb24ec5868dbb243b3cbc3
Author: Andy LoPresto 
Date:   2016-06-21T17:12:01Z

Removed duplicate dependency declaration from 
nifi-standard-processors/pom.xml.




---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


ConvertJSONToAvro processor is slow?

2016-06-21 Thread pradeepbill
Hi there, I am seeing huge delays in the ConvertJSONToAvro processor, where the
converted data trickles down to the next step. Please see the attachment. I have
increased the concurrent tasks to 100. Is there anything else I can
configure/tune up?

nifi_Capture.PNG

  

Thanks
Pradeep



--
View this message in context: 
http://apache-nifi-developer-list.39713.n7.nabble.com/ConvertJSONToAvro-processor-is-slow-tp11957.html
Sent from the Apache NiFi Developer List mailing list archive at Nabble.com.


Re: putTCP nifi 0.7?

2016-06-21 Thread Joe Percivall
Hello,


As a follow-up, the PutTCP processor was just merged into the master and 0.x 
branches and will be in the 0.7.0 release.
 Joe

- - - - - - 
Joseph Percivall
linkedin.com/in/Percivall
e: joeperciv...@yahoo.com




On Thursday, June 16, 2016 4:57 PM, Joe Percivall 
 wrote:
Hello,

It is not currently marked as something that will go in 0.7.0 but I will review 
it to see where it stands. If it is able to be completed shortly (the contributor 
addresses my feedback) then it will make it in but if not then unfortunately 
it would slide to a later release.


This ticket falls into a category of unversioned "patch available" tickets 
(tickets that don't have a fix version and have a status "patch available") 
that need to be addressed. As the Release Manager for 0.7.0 I will take a look 
at the unversioned "patch available" tickets to see which of them can be 
finished off for 0.7.0 and ping the appropriate committers to help out.

Joe
- - - - - - Joseph Percivall
linkedin.com/in/Percivall
e: joeperciv...@yahoo.com





On Thursday, June 16, 2016 2:00 PM, Ryan Ward  wrote:
Will this one make 0.7?

https://issues.apache.org/jira/browse/NIFI-1834


[GitHub] nifi pull request #543: NIFI-1834 Create PutTCP Processor

2016-06-21 Thread asfgit
Github user asfgit closed the pull request at:

https://github.com/apache/nifi/pull/543


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi issue #543: NIFI-1834 Create PutTCP Processor

2016-06-21 Thread JPercivall
Github user JPercivall commented on the issue:

https://github.com/apache/nifi/pull/543
  
Also, for the sake of transparency, we had a discussion offline debating 
the various directions to take to handle the "consistent inconsistency", and 
came up with the idea of a checkbox in the UI that lets the user choose whether 
the entered text is treated literally or interpreted. I created a ticket for it 
here [1].

For now, in order to stay consistent with the other processors in this area 
that do this conversion automatically, the PutTCP processor will convert a "\n" 
entered in the UI to a newline character (and likewise for tabs and carriage 
returns).

[1] https://issues.apache.org/jira/browse/NIFI-2069
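
As a side note, one common way to do this kind of conversion in Java (not 
necessarily what PutTCP itself uses) is commons-lang's unescapeJava; a minimal, 
hedged sketch:

    import org.apache.commons.lang3.StringEscapeUtils;

    public class DelimiterUnescapeSketch {
        public static void main(String[] args) {
            String uiValue = "\\n"; // the two characters '\' and 'n', as typed in the UI
            String interpreted = StringEscapeUtils.unescapeJava(uiValue);
            System.out.println(interpreted.equals("\n")); // prints true: it became a newline
        }
    }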


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi issue #543: NIFI-1834 Create PutTCP Processor

2016-06-21 Thread JPercivall
Github user JPercivall commented on the issue:

https://github.com/apache/nifi/pull/543
  
Thanks @olegz, will merge it in


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi issue #543: NIFI-1834 Create PutTCP Processor

2016-06-21 Thread olegz
Github user olegz commented on the issue:

https://github.com/apache/nifi/pull/543
  
Ok, consistency wins so a somewhat reluctant +1 :)


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


[GitHub] nifi issue #362: NIFI-1769: added support for SSE-KMS and signature s3v4 aut...

2016-06-21 Thread miquillo
Github user miquillo commented on the issue:

https://github.com/apache/nifi/pull/362
  
@jvwing Thanks for picking this up! 

- I would 100% apply this to the AbstractS3Processor. By the time I figured 
that out myself, I had already implemented it (in the wrong class). It should be 
easy to move to the AbstractS3Processor; not much code is affected by this PR. 
- Definitely make this a combobox / selectable option. Making it a true/false 
flag was just a quick fix for us, since we are only using Signature v4. It 
indeed makes it more future-proof. 
- I was completely unaware of ClientConfiguration::setSignerOverride() and I 
also dislike the System.setProperty(..) approach. If it works the same way, 
definitely go for it. 
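
For reference, a minimal sketch of the ClientConfiguration-based override 
discussed above, assuming the AWS SDK for Java v1 already used by the S3 
processors; the credentials and endpoint below are placeholders:

    import com.amazonaws.ClientConfiguration;
    import com.amazonaws.auth.AWSCredentials;
    import com.amazonaws.auth.BasicAWSCredentials;
    import com.amazonaws.services.s3.AmazonS3Client;

    public class SignerOverrideSketch {
        public static void main(String[] args) {
            AWSCredentials credentials = new BasicAWSCredentials("accessKey", "secretKey"); // placeholders

            // Per-client signer override instead of a JVM-wide System.setProperty(..)
            ClientConfiguration config = new ClientConfiguration();
            config.setSignerOverride("AWSS3V4SignerType"); // request Signature Version 4 signing

            AmazonS3Client s3 = new AmazonS3Client(credentials, config);
            s3.setEndpoint("s3.eu-central-1.amazonaws.com"); // a region that accepts only SigV4
        }
    }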


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---


Re: Suggestion of processors

2016-06-21 Thread Aldrin Piri
Andre,

The proposed processors all sound like nice functionality.  I did have a
couple of questions.

Concerning the ParseKV, are you aware of the getDelimitedField[1] function
in Expression Language?  I think this may take care of this case for
handling these items.
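
For reference, a hedged example of that function (the attribute name, value, 
and delimiter below are made up): given a FlowFile attribute named message 
containing a|b|c, the expression

    ${message:getDelimitedField(2, '|')}

evaluates to b; the field index is 1-based and the optional second argument 
overrides the default comma delimiter.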

With the QueryBulkWhois API, does it make sense to roll this into the
QueryDNS as a configurable property to do batch?  Performing a cursory
review of the PR, it looks like this would potentially be targeting those
same servers.  Are the batch lookups made against more web-service-oriented 
endpoints, as opposed to just querying DNS?

--aldrin

[1]
https://nifi.apache.org/docs/nifi-docs/html/expression-language-guide.html#getdelimitedfield

On Sat, Jun 18, 2016 at 11:14 PM, Andre  wrote:

> Johny,
>
> I haven't used graylog heavily so would you mind clarifying what do you
> mean by
>
> "Would those work like graylog also"
>
>
>
> Cheers
>
> On Sun, Jun 19, 2016 at 12:48 PM, johny casanova <
> computertech2...@gmail.com
> > wrote:
>
> > Great idea! Would those work like graylog also?
> > On Jun 18, 2016 9:30 PM, "Andre"  wrote:
> >
> > > Devs,
> > >
> > > I am continuing to drive the migration of our logging pipeline to NiFi
> > and
> > > in the process identified some areas of log processing that could be
> > > improved by the introduction of new processors.
> > >
> > > I wonder: would anyone oppose the idea of introducing the following
> > > processors:
> > >
> > >
> > > 1. ParseCEF (think of it like logstash-codec-cef)
> > > Processor to parse CEF format - (
> > > https://www.protect724.hpe.com/docs/DOC-1072);
> > > CEF attributes would be converted into NiFi FlowFiles attributes;
> > >
> > >
> > > 2. ParseKV (think of it like Splunk's kv parser)
> > > A processor to split strings by keys and values (delimiter based) would be
> > > added to FlowFile attributes;
> > > Parser would support extracting multiple instances of the same key via
> > > attributes like parse.kv.key_name.0 , parse.kv.key_name.1, etc)
> > >
> > >
> > > 3. QueryBulkWhoisAPI
> > > This processor would read a batch of Flowfiles, extract the appropriate
> > > field (e.g. ip address), make the batch whois query, parse results and
> > then
> > > append results to individual FlowFiles.
> > >
> > > This processor would complement QueryDNS (PR#496). QueryDNS only makes
> > > individual queries and depending on API access conditions it may lead
> to
> > > blacklisting. Some providers will license access (e.g. Spamhaus RBLs),
> > > while others (e.g. SHadowServer) suggest instead the use of bulk
> queries.
> > >
> > >
> > > Keen to hear your opinion
> > >
> >
>


0.6.1 & 0.7.0 chkconfig (start on boot) bug centos 7

2016-06-21 Thread Ryan H
I'm trying to get the 0.7.0 NiFi to start on boot on linux/centos 7.
During all this, I've noticed 0.6.1 doesn't quite work either, left some
notes at the bottom about that.

*For 0.7.0:*
*I followed the modified install commands for the nifi.sh script:*
I untar'd it in:
 #/opt/nifi/current -> nifi-0.7.0-SNAPSHOT

I followed these steps:
##Edited the nifi.sh script for the SCRIPT_DIR issue.
#/opt/nifi/current/bin/nifi.sh install
*#chkconfig nifi on  <--- Turn on for boot 2345 run levels*
#service nifi start

NiFi is now started.

I reboot the box.

*NiFi does not start.*  There's no logs in /var/log/messages or
/opt/nifi/current/logs indicating why.  (This script should probably log
someplace)

*Why?*
The current script has a command that starts as:
#cd ${NIFI_HOME} && sudo -u ${run_as}  &

The sudo part is omitted if there is no ${run_as} user defined. This works
for starting the service by hand. However, if this script is set to start
on boot with a ${run_as} user, in this case using chkconfig, it will
silently fail when starting on boot because of the "sudo" part.  Not sure
why "sudo" isn't well liked in CentOS 7 in a service script.

*How we fixed it:*
Fixed by structuring the command like this the nifi.sh script like this:

Old Command:
## RUN_NIFI_CMD="cd "\""${NIFI_HOME}"\"" && ${sudo_cmd_prefix}
"\""${JAVA}"\"" -cp "\""${BOOTSTRAP_CLASSPATH}"\"" -Xms12m -Xmx24m
${BOOTSTRAP_DIR_PARAMS}  org.apache.nifi.bootstrap.RunNiFi"

Put it into the if's:
#if [ "$1" = "start" ]; then
#RUN_NIFI_CMD="su -c "\""cd "\""${NIFI_HOME}"\"" && "\""${JAVA}"\""
-cp "\""${BOOTSTRAP_CLASSPATH}"\"" -Xms12m -Xmx24m ${BOOTSTRAP_DIR_PARAMS}
 org.apache.nifi.bootstrap.RunNiFi $@ &"\"" ${run_as}"
#(eval $RUN_NIFI_CMD)
#else
#RUN_NIFI_CMD="su -c "\""cd "\""${NIFI_HOME}"\"" && "\""${JAVA}"\""
-cp "\""${BOOTSTRAP_CLASSPATH}"\"" -Xms12m -Xmx24m ${BOOTSTRAP_DIR_PARAMS}
 org.apache.nifi.bootstrap.RunNiFi $@"\"" ${run_as}"
#(eval $RUN_NIFI_CMD)
#fi


It now starts on boot.



*Logging in this file:*
* I created a /var/log/nifi dir as root
* I started piping echo statements to "tee -a /var/log/nifi/init.log"
** *Example: #echo "Attempting to start NiFi" | tee -a
/var/log/nifi/init.log



*For 0.6.1:*
*I followed the standard install commands for the nifi.sh script.*

I untar'd it in:
 #/opt/nifi/current -> nifi-0.7.0-SNAPSHOT

I followed these steps:
#/opt/nifi/current/bin/nifi.sh install
*#chkconfig nifi on  <--- Turn on for boot 2345 run levels*
#service nifi start

NiFi is now started.

I reboot the box.

*NiFi does not start.*  There's no logs in /var/log/messages or
/opt/nifi/current/logs indicating why.  (This script should probably log
someplace)

*Why?*
The current script has a command that starts as:
#cd ${NIFI_HOME} && sudo -u ${run_as}  &

The sudo part is omitted if there is no ${run_as} user defined. This works
for starting the service by hand. However, if this script is set to start
on boot with a ${run_as} user, in this case using chkconfig, it will
silently fail when starting on boot because of the "sudo" part.  Not sure
why "sudo" isn't well liked in CentOS 7 in a service script.

*How we fixed it:*
Fixed by structuring the command like this:
#su -c "cd ${NIFI_HOME} &&  &" ${run_as}

This works when starting on boot if you have a ${run_as} user defined,
though not sure of the behavior if there is no ${run_as} user defined or if
the ${run_as} user is root.
--

Thanks,
Ryan


[GitHub] nifi pull request #496: NIFI-1965 - Implement QueryDNS Processor

2016-06-21 Thread pvillard31
Github user pvillard31 commented on a diff in the pull request:

https://github.com/apache/nifi/pull/496#discussion_r67825492
  
--- Diff: 
nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/AbstractEnrichProcessor.java
 ---
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nifi.processors;
+
+
+import org.apache.nifi.components.AllowableValue;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.components.ValidationContext;
+import org.apache.nifi.components.ValidationResult;
+import org.apache.nifi.processor.AbstractProcessor;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.util.StandardValidators;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public abstract class AbstractEnrichProcessor extends AbstractProcessor {
+public static final PropertyDescriptor QUERY_INPUT = new 
PropertyDescriptor.Builder()
+.name("Format the query should be to be executed ")
--- End diff --

Add also .displayName() as suggested in 
https://mail-archives.apache.org/mod_mbox/nifi-dev/201605.mbox/%3C5A6FDF1E-1889-46FE-A3C4-5D2F0A905979%40apache.org%3E
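
For illustration, a minimal sketch of the name()/displayName() pattern being 
suggested, using the NiFi PropertyDescriptor.Builder API; the property key and 
text below are placeholders:

    public static final PropertyDescriptor QUERY_INPUT = new PropertyDescriptor.Builder()
            .name("enrich-query-input")          // stable, machine-readable identifier
            .displayName("Query input")          // human-facing label shown in the UI
            .description("The value the enrichment query should be executed against.")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();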


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---