This is an automated email from the ASF dual-hosted git repository.

ottersbach pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


The following commit(s) were added to refs/heads/main by this push:
     new 8c2ceac6a4 NIFI-14108 Removed the instantiation of extraneous Object 
arrays in vararg methods. (#9594)
8c2ceac6a4 is described below

commit 8c2ceac6a4cf3af11935feeed078f5c82852f309
Author: dan-s1 <[email protected]>
AuthorDate: Wed Dec 25 03:48:29 2024 -0500

    NIFI-14108 Removed the instantiation of extraneous Object arrays in vararg 
methods. (#9594)
    
    Signed-off-by: Lucas Ottersbach <[email protected]>
---
 .../util/file/transfer/GetFileTransfer.java        |  2 +-
 .../processors/hadoop/AbstractHadoopProcessor.java |  2 +-
 .../processor/util/pattern/RollbackOnFailure.java  |  2 +-
 .../apache/nifi/processors/hadoop/MoveHDFS.java    |  6 ++---
 .../processors/hadoop/SequenceFileWriterImpl.java  |  2 +-
 .../SiteToSiteProvenanceReportingTask.java         |  2 +-
 .../reporting/SiteToSiteStatusReportingTask.java   |  2 +-
 .../nifi/processors/splunk/PutSplunkHTTP.java      |  2 +-
 .../splunk/QuerySplunkIndexingStatus.java          |  2 +-
 .../standard/AbstractQueryDatabaseTable.java       |  2 +-
 .../processors/standard/ConvertCharacterSet.java   |  2 +-
 .../apache/nifi/processors/standard/DebugFlow.java | 28 +++++++++++-----------
 .../apache/nifi/processors/standard/FetchFile.java |  8 +++----
 .../processors/standard/HandleHttpResponse.java    |  2 +-
 .../apache/nifi/processors/standard/ListFile.java  |  6 ++---
 .../nifi/processors/standard/MergeContent.java     | 10 ++++----
 .../servlets/ContentAcknowledgmentServlet.java     |  6 ++---
 .../apache/nifi/http/StandardHttpContextMap.java   |  2 +-
 .../org/apache/nifi/lookup/RestLookupService.java  |  2 +-
 19 files changed, 45 insertions(+), 45 deletions(-)

diff --git 
a/nifi-extension-bundles/nifi-extension-utils/nifi-file-transfer/src/main/java/org/apache/nifi/processor/util/file/transfer/GetFileTransfer.java
 
b/nifi-extension-bundles/nifi-extension-utils/nifi-file-transfer/src/main/java/org/apache/nifi/processor/util/file/transfer/GetFileTransfer.java
index 88c8e9f18c..a6b65db771 100644
--- 
a/nifi-extension-bundles/nifi-extension-utils/nifi-file-transfer/src/main/java/org/apache/nifi/processor/util/file/transfer/GetFileTransfer.java
+++ 
b/nifi-extension-bundles/nifi-extension-utils/nifi-file-transfer/src/main/java/org/apache/nifi/processor/util/file/transfer/GetFileTransfer.java
@@ -312,6 +312,6 @@ public abstract class GetFileTransfer extends 
AbstractProcessor {
         }
 
         getLogger().info("Obtained file listing in {} milliseconds; listing 
had {} items, {} of which were new",
-                new Object[]{millis, listing.size(), newItems});
+                millis, listing.size(), newItems);
     }
 }
diff --git 
a/nifi-extension-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
 
b/nifi-extension-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
index 5c0e156855..842f2d7b20 100644
--- 
a/nifi-extension-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
+++ 
b/nifi-extension-bundles/nifi-extension-utils/nifi-hadoop-utils/src/main/java/org/apache/nifi/processors/hadoop/AbstractHadoopProcessor.java
@@ -371,7 +371,7 @@ public abstract class AbstractHadoopProcessor extends 
AbstractProcessor implemen
 
         final Path workingDir = fs.getWorkingDirectory();
         getLogger().info("Initialized a new HDFS File System with working dir: 
{} default block size: {} default replication: {} config: {}",
-                new Object[]{workingDir, fs.getDefaultBlockSize(workingDir), 
fs.getDefaultReplication(workingDir), config.toString()});
+                workingDir, fs.getDefaultBlockSize(workingDir), 
fs.getDefaultReplication(workingDir), config.toString());
 
         return new HdfsResources(config, fs, ugi, kerberosUser);
     }
diff --git 
a/nifi-extension-bundles/nifi-extension-utils/nifi-put-pattern/src/main/java/org/apache/nifi/processor/util/pattern/RollbackOnFailure.java
 
b/nifi-extension-bundles/nifi-extension-utils/nifi-put-pattern/src/main/java/org/apache/nifi/processor/util/pattern/RollbackOnFailure.java
index 8edd1b791f..ab40d0d030 100644
--- 
a/nifi-extension-bundles/nifi-extension-utils/nifi-put-pattern/src/main/java/org/apache/nifi/processor/util/pattern/RollbackOnFailure.java
+++ 
b/nifi-extension-bundles/nifi-extension-utils/nifi-put-pattern/src/main/java/org/apache/nifi/processor/util/pattern/RollbackOnFailure.java
@@ -132,7 +132,7 @@ public class RollbackOnFailure {
             if (adjusted != null) {
                 if (logger.isDebugEnabled()) {
                     logger.debug("Adjusted {} to {} based on context 
rollbackOnFailure={}, processedCount={}, transactional={}",
-                            new Object[]{t, adjusted, c.isRollbackOnFailure(), 
c.getProcessedCount(), c.isTransactional()});
+                            t, adjusted, c.isRollbackOnFailure(), 
c.getProcessedCount(), c.isTransactional());
                 }
                 return adjusted;
             }
diff --git 
a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/MoveHDFS.java
 
b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/MoveHDFS.java
index fbd3cc785e..578d0cc586 100644
--- 
a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/MoveHDFS.java
+++ 
b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/MoveHDFS.java
@@ -377,20 +377,20 @@ public class MoveHDFS extends AbstractHadoopProcessor {
                                 // Remove destination file (newFile) to replace
                                 if (hdfs.delete(newFile, false)) {
                                     getLogger().info("deleted {} in order to 
replace with the contents of {}",
-                                            new Object[]{newFile, flowFile});
+                                            newFile, flowFile);
                                 }
                                 break;
                             case IGNORE_RESOLUTION:
                                 session.transfer(flowFile, REL_SUCCESS);
                                 getLogger().info(
                                         "transferring {} to success because 
file with same name already exists",
-                                        new Object[]{flowFile});
+                                        flowFile);
                                 return null;
                             case FAIL_RESOLUTION:
                                 session.transfer(session.penalize(flowFile), 
REL_FAILURE);
                                 getLogger().warn(
                                         "penalizing {} and routing to failure 
because file with same name already exists",
-                                        new Object[]{flowFile});
+                                        flowFile);
                                 return null;
                             default:
                                 break;
diff --git 
a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/SequenceFileWriterImpl.java
 
b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/SequenceFileWriterImpl.java
index f794c3b485..efc1faceea 100644
--- 
a/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/SequenceFileWriterImpl.java
+++ 
b/nifi-extension-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/SequenceFileWriterImpl.java
@@ -107,7 +107,7 @@ public class SequenceFileWriterImpl implements 
SequenceFileWriter {
             }
         });
         logger.debug("Wrote Sequence File {} ({}).",
-                new Object[]{sequenceFilename, 
watch.calculateDataRate(flowFile.getSize())});
+                sequenceFilename, watch.calculateDataRate(flowFile.getSize()));
         return sfFlowFile;
     }
 
diff --git 
a/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/main/java/org/apache/nifi/reporting/SiteToSiteProvenanceReportingTask.java
 
b/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/main/java/org/apache/nifi/reporting/SiteToSiteProvenanceReportingTask.java
index 2b2b539aa6..8045e30c3e 100644
--- 
a/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/main/java/org/apache/nifi/reporting/SiteToSiteProvenanceReportingTask.java
+++ 
b/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/main/java/org/apache/nifi/reporting/SiteToSiteProvenanceReportingTask.java
@@ -325,7 +325,7 @@ public class SiteToSiteProvenanceReportingTask extends 
AbstractSiteToSiteReporti
 
                 final long transferMillis = 
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
                 getLogger().info("Successfully sent {} Provenance Events to 
destination in {} ms; Transaction ID = {}; First Event ID = {}",
-                        new Object[] {events.size(), transferMillis, 
transactionId, events.get(0).getEventId()});
+                        events.size(), transferMillis, transactionId, 
events.get(0).getEventId());
             } catch (final Exception e) {
                 if (transaction != null) {
                     transaction.error();
diff --git 
a/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/main/java/org/apache/nifi/reporting/SiteToSiteStatusReportingTask.java
 
b/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/main/java/org/apache/nifi/reporting/SiteToSiteStatusReportingTask.java
index 456ab9e5ef..732a1a9617 100644
--- 
a/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/main/java/org/apache/nifi/reporting/SiteToSiteStatusReportingTask.java
+++ 
b/nifi-extension-bundles/nifi-site-to-site-reporting-bundle/nifi-site-to-site-reporting-task/src/main/java/org/apache/nifi/reporting/SiteToSiteStatusReportingTask.java
@@ -188,7 +188,7 @@ public class SiteToSiteStatusReportingTask extends 
AbstractSiteToSiteReportingTa
 
                 final long transferMillis = 
TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
                 getLogger().info("Successfully sent {} Status Records to 
destination in {} ms; Transaction ID = {}",
-                        new Object[]{jsonArray.size(), transferMillis, 
transactionId});
+                        jsonArray.size(), transferMillis, transactionId);
 
                 fromIndex = toIndex;
                 toIndex = Math.min(fromIndex + batchSize, jsonArray.size());
diff --git 
a/nifi-extension-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/PutSplunkHTTP.java
 
b/nifi-extension-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/PutSplunkHTTP.java
index 4452da0fb1..c09875c40e 100644
--- 
a/nifi-extension-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/PutSplunkHTTP.java
+++ 
b/nifi-extension-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/PutSplunkHTTP.java
@@ -188,7 +188,7 @@ public class PutSplunkHTTP extends SplunkAPICall {
                     // fall-through
                 default:
                     getLogger().error("Putting data into Splunk was not 
successful. Response with header {} was: {}",
-                            new Object[] {responseMessage.getStatus(), 
IOUtils.toString(responseMessage.getContent(), "UTF-8")});
+                            responseMessage.getStatus(), 
IOUtils.toString(responseMessage.getContent(), "UTF-8"));
             }
         } catch (final Exception e) {
             getLogger().error("Error during communication with Splunk: {}", 
e.getMessage(), e);
diff --git 
a/nifi-extension-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/QuerySplunkIndexingStatus.java
 
b/nifi-extension-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/QuerySplunkIndexingStatus.java
index 80dd552677..314d51c2b4 100644
--- 
a/nifi-extension-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/QuerySplunkIndexingStatus.java
+++ 
b/nifi-extension-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/QuerySplunkIndexingStatus.java
@@ -170,7 +170,7 @@ public class QuerySplunkIndexingStatus extends 
SplunkAPICall {
 
             if (!sentAt.isPresent() || !ackId.isPresent()) {
                 getLogger().error("Flow file ({}) attributes {} and {} are 
expected to be set using 64-bit integer values!",
-                        new Object[]{flowFile.getId(), 
SplunkAPICall.RESPONDED_AT_ATTRIBUTE, 
SplunkAPICall.ACKNOWLEDGEMENT_ID_ATTRIBUTE});
+                        flowFile.getId(), 
SplunkAPICall.RESPONDED_AT_ATTRIBUTE, 
SplunkAPICall.ACKNOWLEDGEMENT_ID_ATTRIBUTE);
                 session.transfer(flowFile, RELATIONSHIP_FAILURE);
             } else {
                 undetermined.put(ackId.get(), flowFile);
diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java
 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java
index 4d6e9d2ced..d005771d5b 100644
--- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java
+++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java
@@ -436,7 +436,7 @@ public abstract class AbstractQueryDatabaseTable extends 
AbstractDatabaseFetchPr
                         sqlWriter.updateCounters(session);
 
                         logger.debug("{} contains {} records; transferring to 
'success'",
-                                new Object[]{fileToProcess, nrOfRows.get()});
+                                fileToProcess, nrOfRows.get());
 
                         session.getProvenanceReporter().receive(fileToProcess, 
jdbcURL, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
                         resultSetFlowFiles.add(fileToProcess);
diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
index a45fbf864b..cc55538630 100644
--- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
+++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java
@@ -158,7 +158,7 @@ public class ConvertCharacterSet extends AbstractProcessor {
 
             session.getProvenanceReporter().modifyContent(flowFile, 
stopWatch.getElapsed(TimeUnit.MILLISECONDS));
             logger.info("successfully converted characters from {} to {} for 
{}",
-                    new Object[]{inputCharset, outputCharset, flowFile});
+                    inputCharset, outputCharset, flowFile);
             session.transfer(flowFile, REL_SUCCESS);
         } catch (final Exception e) {
             throw new ProcessException(e);
diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DebugFlow.java
 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DebugFlow.java
index 149b0e1070..28273d81d1 100644
--- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DebugFlow.java
+++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DebugFlow.java
@@ -417,7 +417,7 @@ public class DebugFlow extends AbstractProcessor {
                             } catch (InstantiationException | 
IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
                                 if (logger.isErrorEnabled()) {
                                     logger.error("{} unexpected exception 
throwing DebugFlow exception: {}",
-                                        new Object[] {this, e});
+                                            this, e);
                                 }
                             }
                         } else {
@@ -461,8 +461,8 @@ public class DebugFlow extends AbstractProcessor {
                         if (flowFileCurrSuccess < flowFileMaxSuccess) {
                             flowFileCurrSuccess += 1;
                             logger.info("DebugFlow transferring to success 
file={} UUID={}",
-                                new Object[] 
{ff.getAttribute(CoreAttributes.FILENAME.key()),
-                                    
ff.getAttribute(CoreAttributes.UUID.key())});
+                                    
ff.getAttribute(CoreAttributes.FILENAME.key()),
+                                    
ff.getAttribute(CoreAttributes.UUID.key()));
                             session.transfer(ff, REL_SUCCESS);
                             break;
                         } else {
@@ -475,8 +475,8 @@ public class DebugFlow extends AbstractProcessor {
                         if (flowFileCurrFailure < flowFileMaxFailure) {
                             flowFileCurrFailure += 1;
                             logger.info("DebugFlow transferring to failure 
file={} UUID={}",
-                                new Object[] 
{ff.getAttribute(CoreAttributes.FILENAME.key()),
-                                    
ff.getAttribute(CoreAttributes.UUID.key())});
+                                    
ff.getAttribute(CoreAttributes.FILENAME.key()),
+                                    
ff.getAttribute(CoreAttributes.UUID.key()));
                             session.transfer(ff, REL_FAILURE);
                             break;
                         } else {
@@ -489,8 +489,8 @@ public class DebugFlow extends AbstractProcessor {
                         if (flowFileCurrRollback < flowFileMaxRollback) {
                             flowFileCurrRollback += 1;
                             logger.info("DebugFlow rolling back (no penalty) 
file={} UUID={}",
-                                new Object[] 
{ff.getAttribute(CoreAttributes.FILENAME.key()),
-                                    
ff.getAttribute(CoreAttributes.UUID.key())});
+                                    
ff.getAttribute(CoreAttributes.FILENAME.key()),
+                                    
ff.getAttribute(CoreAttributes.UUID.key()));
                             session.rollback();
                             break;
                         } else {
@@ -503,8 +503,8 @@ public class DebugFlow extends AbstractProcessor {
                         if (flowFileCurrYield < flowFileMaxYield) {
                             flowFileCurrYield += 1;
                             logger.info("DebugFlow yielding file={} UUID={}",
-                                new Object[] 
{ff.getAttribute(CoreAttributes.FILENAME.key()),
-                                    
ff.getAttribute(CoreAttributes.UUID.key())});
+                                    
ff.getAttribute(CoreAttributes.FILENAME.key()),
+                                    
ff.getAttribute(CoreAttributes.UUID.key()));
                             session.rollback();
                             context.yield();
                             return;
@@ -518,8 +518,8 @@ public class DebugFlow extends AbstractProcessor {
                         if (flowFileCurrPenalty < flowFileMaxPenalty) {
                             flowFileCurrPenalty += 1;
                             logger.info("DebugFlow rolling back (with penalty) 
file={} UUID={}",
-                                new Object[] 
{ff.getAttribute(CoreAttributes.FILENAME.key()),
-                                    
ff.getAttribute(CoreAttributes.UUID.key())});
+                                    
ff.getAttribute(CoreAttributes.FILENAME.key()),
+                                    
ff.getAttribute(CoreAttributes.UUID.key()));
                             session.rollback(true);
                             break;
                         } else {
@@ -533,8 +533,8 @@ public class DebugFlow extends AbstractProcessor {
                             flowFileCurrException += 1;
                             String message = "forced by " + 
this.getClass().getName();
                             logger.info("DebugFlow throwing NPE file={} 
UUID={}",
-                                new Object[] 
{ff.getAttribute(CoreAttributes.FILENAME.key()),
-                                    
ff.getAttribute(CoreAttributes.UUID.key())});
+                                    
ff.getAttribute(CoreAttributes.FILENAME.key()),
+                                    
ff.getAttribute(CoreAttributes.UUID.key()));
                             RuntimeException rte;
                             try {
                                 rte = 
flowFileExceptionClass.getConstructor(String.class).newInstance(message);
@@ -542,7 +542,7 @@ public class DebugFlow extends AbstractProcessor {
                             } catch (InstantiationException | 
IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
                                 if (logger.isErrorEnabled()) {
                                     logger.error("{} unexpected exception 
throwing DebugFlow exception: {}",
-                                        new Object[] {this, e});
+                                            this, e);
                                 }
                             }
                         } else {
diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java
 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java
index e16aaf7aa3..6b3e34f97b 100644
--- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java
+++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java
@@ -251,7 +251,7 @@ public class FetchFile extends AbstractProcessor {
         Path filePath = file.toPath();
         if (!Files.exists(filePath) && !Files.notExists(filePath)) { // see 
https://docs.oracle.com/javase/tutorial/essential/io/check.html for more details
             getLogger().log(levelFileNotFound, "Could not fetch file {} from 
file system for {} because the existence of the file cannot be verified; 
routing to failure",
-                    new Object[] {file, flowFile});
+                    file, flowFile);
             session.transfer(session.penalize(flowFile), REL_FAILURE);
             return;
         } else if (!Files.exists(filePath)) {
@@ -265,7 +265,7 @@ public class FetchFile extends AbstractProcessor {
         final String user = System.getProperty("user.name");
         if (!isReadable(file)) {
             getLogger().log(levelPermDenied, "Could not fetch file {} from 
file system for {} due to user {} not having sufficient permissions to read the 
file; routing to permission.denied",
-                new Object[] {file, flowFile, user});
+                    file, flowFile, user);
             session.getProvenanceReporter().route(flowFile, 
REL_PERMISSION_DENIED);
             session.transfer(session.penalize(flowFile), 
REL_PERMISSION_DENIED);
             return;
@@ -281,7 +281,7 @@ public class FetchFile extends AbstractProcessor {
                 if (targetDir.exists() && (!isWritable(targetDir) || 
!isDirectory(targetDir))) {
                     getLogger().error("Could not fetch file {} from file 
system for {} because Completion Strategy is configured to move the original 
file to {}, "
                         + "but that is not a directory or user {} does not 
have permissions to write to that directory",
-                        new Object[] {file, flowFile, targetDir, user});
+                            file, flowFile, targetDir, user);
                     session.transfer(flowFile, REL_FAILURE);
                     return;
                 }
@@ -305,7 +305,7 @@ public class FetchFile extends AbstractProcessor {
                     if (targetFile.exists()) {
                         getLogger().error("Could not fetch file {} from file 
system for {} because Completion Strategy is configured to move the original 
file to {}, "
                             + "but a file with name {} already exists in that 
directory and the Move Conflict Strategy is configured for failure",
-                            new Object[] {file, flowFile, targetDir, 
file.getName()});
+                                file, flowFile, targetDir, file.getName());
                         session.transfer(flowFile, REL_FAILURE);
                         return;
                     }
diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
index ca03f3a804..18df55617e 100644
--- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
+++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java
@@ -148,7 +148,7 @@ public class HandleHttpResponse extends AbstractProcessor {
         final HttpServletResponse response = 
contextMap.getResponse(contextIdentifier);
         if (response == null) {
             getLogger().error("Failed to respond to HTTP request for {} 
because FlowFile had an '{}' attribute of {} but could not find an HTTP 
Response Object for this identifier",
-                    new Object[]{flowFile, HTTPUtils.HTTP_CONTEXT_ID, 
contextIdentifier});
+                    flowFile, HTTPUtils.HTTP_CONTEXT_ID, contextIdentifier);
             session.transfer(flowFile, REL_FAILURE);
             return;
         }
diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
index c242671d2d..15af59b7e4 100644
--- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
+++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
@@ -1058,7 +1058,7 @@ public class ListFile extends 
AbstractListProcessor<FileInfo> {
             if (duration > maxDiskOperationMillis) {
                 final String fullPath = getFullPath();
                 logger.warn("This Processor completed action {} on {} in {} 
milliseconds, which exceeds the configured threshold of {} milliseconds",
-                    new Object[] {operation, fullPath, duration, 
maxDiskOperationMillis});
+                        operation, fullPath, duration, maxDiskOperationMillis);
             }
 
             if (logger.isTraceEnabled()) {
@@ -1332,7 +1332,7 @@ public class ListFile extends 
AbstractListProcessor<FileInfo> {
                 }
 
                 logger.warn("This Processor has currently spent {} 
milliseconds performing the {} action on {}, which exceeds the configured 
threshold of {} milliseconds",
-                    new Object[] {activeTime, activeOperation.getOperation(), 
fullPath, maxDiskOperationMillis});
+                        activeTime, activeOperation.getOperation(), fullPath, 
maxDiskOperationMillis);
             }
         }
 
@@ -1347,7 +1347,7 @@ public class ListFile extends 
AbstractListProcessor<FileInfo> {
             if (activeMillis > maxListingMillis) {
                 final String fullPath = activeDirectory.isEmpty() ? "the base 
directory" : activeDirectory;
                 logger.warn("This processor has currently spent {} 
milliseconds performing the listing of {}, which exceeds the configured 
threshold of {} milliseconds",
-                    new Object[] {activeMillis, fullPath, maxListingMillis});
+                        activeMillis, fullPath, maxListingMillis);
             }
         }
     }
diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
index 9d72e07484..c57576174f 100644
--- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
+++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
@@ -818,7 +818,7 @@ public class MergeContent extends BinFiles {
                                     
tarEntry.setMode(Integer.parseInt(permissionsVal));
                                 } catch (final Exception e) {
                                     getLogger().debug("Attribute {} of {} is 
set to {}; expected 3 digits between 0-7, so ignoring",
-                                        new Object[] 
{TAR_PERMISSIONS_ATTRIBUTE, flowFile, permissionsVal});
+                                            TAR_PERMISSIONS_ATTRIBUTE, 
flowFile, permissionsVal);
                                 }
                             }
 
@@ -829,7 +829,7 @@ public class MergeContent extends BinFiles {
                                     
tarEntry.setModTime(Instant.parse(modTime).toEpochMilli());
                                 } catch (final Exception e) {
                                     getLogger().debug("Attribute {} of {} is 
set to {}; expected ISO8601 format, so ignoring",
-                                        new Object[] {TAR_MODIFIED_TIME, 
flowFile, modTime});
+                                            TAR_MODIFIED_TIME, flowFile, 
modTime);
                                 }
                             }
 
@@ -1037,7 +1037,7 @@ public class MergeContent extends BinFiles {
                                         // check that we're appending to the 
same schema
                                         if 
(!schema.get().equals(reader.getSchema())) {
                                             getLogger().debug("Input file {} 
has different schema - {}, not merging",
-                                                new Object[] 
{flowFile.getId(), reader.getSchema().getName()});
+                                                    flowFile.getId(), 
reader.getSchema().getName());
                                             canMerge = false;
                                             unmerged.add(flowFile);
                                         }
@@ -1053,7 +1053,7 @@ public class MergeContent extends BinFiles {
                                                         // Ignore additional 
metadata if ALL_COMMON is the strategy, otherwise don't merge
                                                         if 
(!METADATA_STRATEGY_ALL_COMMON.getValue().equals(metadataStrategy) || 
writersMetadatum != null) {
                                                             
getLogger().debug("Input file {} has different non-reserved metadata, not 
merging",
-                                                                new Object[] 
{flowFile.getId()});
+                                                                    
flowFile.getId());
                                                             canMerge = false;
                                                             
unmerged.add(flowFile);
                                                         }
@@ -1069,7 +1069,7 @@ public class MergeContent extends BinFiles {
                                         }
                                         if 
(!inputCodec.get().equals(thisCodec)) {
                                             getLogger().debug("Input file {} 
has different codec, not merging",
-                                                new Object[] 
{flowFile.getId()});
+                                                    flowFile.getId());
                                             canMerge = false;
                                             unmerged.add(flowFile);
                                         }
diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java
 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java
index fecc65d486..e187d1f3c7 100644
--- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java
+++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ContentAcknowledgmentServlet.java
@@ -115,7 +115,7 @@ public class ContentAcknowledgmentServlet extends 
HttpServlet {
 
             logger.info("received {} files/{} bytes from Remote Host: [{}] 
Port [{}] SubjectDN [{}] in {} milliseconds at a rate of {}; "
                     + "transferring to 'success': {}",
-                    new Object[]{flowFiles.size(), totalFlowFileSize, 
request.getRemoteHost(), request.getRemotePort(), foundSubject, transferTime, 
transferRate, flowFiles});
+                    flowFiles.size(), totalFlowFileSize, 
request.getRemoteHost(), request.getRemotePort(), foundSubject, transferTime, 
transferRate, flowFiles);
 
             final String sendingSubject = foundSubject;
             final ProcessSession session = timeWrapper.getSession();
@@ -126,13 +126,13 @@ public class ContentAcknowledgmentServlet extends 
HttpServlet {
                     response.flushBuffer();
                 } catch (final Exception e) {
                     logger.error("Received DELETE for HOLD with ID {} from 
Remote Host: [{}] Port [{}] SubjectDN [{}]. FlowFiles were released but failed 
to acknowledge them.",
-                        new Object[]{uuid, request.getRemoteHost(), 
request.getRemotePort(), sendingSubject, e.toString()});
+                            uuid, request.getRemoteHost(), 
request.getRemotePort(), sendingSubject, e.toString());
                 }
             });
         } catch (final Throwable t) {
             timeWrapper.getSession().rollback();
             logger.error("Received DELETE for HOLD with ID {} from Remote 
Host: [{}] Port [{}] SubjectDN [{}], but failed to process the request due to 
{}",
-                    new Object[]{uuid, request.getRemoteHost(), 
request.getRemotePort(), foundSubject, t.toString()});
+                    uuid, request.getRemoteHost(), request.getRemotePort(), 
foundSubject, t.toString());
             if (logger.isDebugEnabled()) {
                 logger.error("", t);
             }
diff --git 
a/nifi-extension-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/java/org/apache/nifi/http/StandardHttpContextMap.java
 
b/nifi-extension-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/java/org/apache/nifi/http/StandardHttpContextMap.java
index f93abf89e6..edb369be40 100644
--- 
a/nifi-extension-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/java/org/apache/nifi/http/StandardHttpContextMap.java
+++ 
b/nifi-extension-bundles/nifi-standard-services/nifi-http-context-map-bundle/nifi-http-context-map/src/main/java/org/apache/nifi/http/StandardHttpContextMap.java
@@ -189,7 +189,7 @@ public class StandardHttpContextMap extends 
AbstractControllerService implements
                         final AsyncContext async = entry.getValue().getAsync();
 
                         getLogger().warn("Request from {} timed out; 
responding with SERVICE_UNAVAILABLE",
-                                new 
Object[]{async.getRequest().getRemoteAddr()});
+                                async.getRequest().getRemoteAddr());
 
                         ((HttpServletResponse) 
async.getResponse()).sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, 
"Timeout occurred");
                         async.complete();
diff --git 
a/nifi-extension-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/src/main/java/org/apache/nifi/lookup/RestLookupService.java
 
b/nifi-extension-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/src/main/java/org/apache/nifi/lookup/RestLookupService.java
index c393f5b85e..4a8ed532b7 100644
--- 
a/nifi-extension-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/src/main/java/org/apache/nifi/lookup/RestLookupService.java
+++ 
b/nifi-extension-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/src/main/java/org/apache/nifi/lookup/RestLookupService.java
@@ -372,7 +372,7 @@ public class RestLookupService extends 
AbstractControllerService implements Reco
 
             if (getLogger().isDebugEnabled()) {
                 getLogger().debug("Response code {} was returned for 
coordinate {}",
-                        new Object[]{response.code(), coordinates});
+                        response.code(), coordinates);
             }
 
             if (!response.isSuccessful()


Reply via email to