This is an automated email from the ASF dual-hosted git repository.
exceptionfactory pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git
The following commit(s) were added to refs/heads/main by this push:
new 7992d06632 NIFI-15019 Standardized property names in GCP, Geohash, Graph, and Groovy bundles (#10397)
7992d06632 is described below
commit 7992d066326acf2cc986a39a1b0abc032ff5f7f6
Author: dan-s1 <[email protected]>
AuthorDate: Thu Oct 16 23:37:15 2025 -0400
NIFI-15019 Standardized property names in GCP, Geohash, Graph, and Groovy bundles (#10397)
Signed-off-by: David Handermann <[email protected]>
---
.../nifi/processors/gcp/AbstractGCPProcessor.java | 12 +++---
.../gcp/bigquery/AbstractBigQueryProcessor.java | 36 +++++++---------
.../gcp/bigquery/BigQueryAttributes.java | 50 ----------------------
.../nifi/processors/gcp/bigquery/PutBigQuery.java | 41 ++++++++++--------
.../factory/CredentialPropertyDescriptors.java | 12 ++----
.../service/GCPCredentialsControllerService.java | 9 ++++
.../processors/gcp/drive/FetchGoogleDrive.java | 12 +++++-
.../processors/gcp/drive/GoogleDriveTrait.java | 8 ++--
.../nifi/processors/gcp/drive/ListGoogleDrive.java | 15 ++++---
.../nifi/processors/gcp/drive/PutGoogleDrive.java | 30 ++++++++-----
.../gcp/pubsub/AbstractGCPubSubProcessor.java | 24 +++++++----
.../processors/gcp/pubsub/ConsumeGCPubSub.java | 12 ++++--
.../processors/gcp/pubsub/PublishGCPubSub.java | 10 ++++-
.../gcp/storage/AbstractGCSProcessor.java | 12 ++++--
.../processors/gcp/storage/DeleteGCSObject.java | 22 ++++++----
.../processors/gcp/storage/FetchGCSObject.java | 34 +++++++++------
.../nifi/processors/gcp/storage/ListGCSBucket.java | 22 +++++-----
.../nifi/processors/gcp/storage/PutGCSObject.java | 42 ++++++++++--------
.../nifi/processors/gcp/util/GoogleUtils.java | 4 +-
.../gcp/vision/AbstractGcpVisionProcessor.java | 7 +++
...bstractGetGcpVisionAnnotateOperationStatus.java | 10 ++++-
.../vision/AbstractStartGcpVisionOperation.java | 14 ++++--
.../StartGcpVisionAnnotateFilesOperation.java | 10 ++++-
.../StartGcpVisionAnnotateImagesOperation.java | 10 ++++-
.../processors/gcp/bigquery/PutBigQueryIT.java | 48 +++++++++++----------
.../processors/gcp/bigquery/PutBigQueryTest.java | 4 +-
.../nifi/processors/geohash/GeohashRecord.java | 41 ++++++++++--------
.../processors/graph/AbstractGraphExecutor.java | 6 +--
.../nifi/processors/graph/ExecuteGraphQuery.java | 7 +++
.../processors/graph/ExecuteGraphQueryRecord.java | 21 +++++----
.../nifi/graph/Neo4JCypherClientService.java | 37 +++++++++-------
.../processors/groovyx/ExecuteGroovyScript.java | 21 +++++----
32 files changed, 362 insertions(+), 281 deletions(-)
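
Every hunk below follows the same two-part pattern: the old machine-readable "name" plus human-readable "displayName" pair collapses into a single human-readable name, and a migrateProperties override maps the old key onto the new one so existing flows keep resolving their properties. A minimal sketch of that pattern, assuming a hypothetical processor and old key (the real classes and keys appear in the diffs that follow):

    import org.apache.nifi.components.PropertyDescriptor;
    import org.apache.nifi.migration.PropertyConfiguration;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.util.StandardValidators;

    public abstract class ExampleProcessor extends AbstractProcessor {

        // Before: .name("example-old-name").displayName("Example Property")
        // After: the human-readable name is the property identifier itself.
        public static final PropertyDescriptor EXAMPLE_PROPERTY = new PropertyDescriptor.Builder()
                .name("Example Property")
                .description("Hypothetical property illustrating the standardized naming.")
                .required(true)
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .build();

        @Override
        public void migrateProperties(final PropertyConfiguration config) {
            // Renames the old machine-readable key ("example-old-name" is
            // hypothetical) so flows built before this change still resolve.
            config.renameProperty("example-old-name", EXAMPLE_PROPERTY.getName());
        }
    }
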
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/AbstractGCPProcessor.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/AbstractGCPProcessor.java
index 5ca79a8bff..7ab48a1101 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/AbstractGCPProcessor.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/AbstractGCPProcessor.java
@@ -52,18 +52,16 @@ public abstract class AbstractGCPProcessor<
private static final String OBSOLETE_PROXY_USERNAME = "gcp-proxy-user-name";
private static final String OBSOLETE_PROXY_PASSWORD = "gcp-proxy-user-password";
- public static final PropertyDescriptor PROJECT_ID = new PropertyDescriptor
- .Builder().name("gcp-project-id")
- .displayName("Project ID")
+ public static final PropertyDescriptor PROJECT_ID = new PropertyDescriptor.Builder()
+ .name("Project ID")
.description("Google Cloud Project ID")
.required(false)
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
- public static final PropertyDescriptor RETRY_COUNT = new PropertyDescriptor
- .Builder().name("gcp-retry-count")
- .displayName("Number of retries")
+ public static final PropertyDescriptor RETRY_COUNT = new PropertyDescriptor.Builder()
+ .name("Number of Retries")
.description("How many retry attempts should be made before routing to the failure relationship.")
.defaultValue("6")
.required(true)
@@ -89,6 +87,8 @@ public abstract class AbstractGCPProcessor<
@Override
public void migrateProperties(final PropertyConfiguration config) {
ProxyServiceMigration.migrateProxyProperties(config, PROXY_CONFIGURATION_SERVICE, OBSOLETE_PROXY_HOST, OBSOLETE_PROXY_PORT, OBSOLETE_PROXY_USERNAME, OBSOLETE_PROXY_PASSWORD);
+ config.renameProperty("gcp-project-id", PROJECT_ID.getName());
+ config.renameProperty("gcp-retry-count", RETRY_COUNT.getName());
}
/**
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/AbstractBigQueryProcessor.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/AbstractBigQueryProcessor.java
index 10b3b00e57..d4d48ad28b 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/AbstractBigQueryProcessor.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/AbstractBigQueryProcessor.java
@@ -29,6 +29,7 @@ import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.logging.ComponentLog;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.VerifiableProcessor;
@@ -52,9 +53,9 @@ import static org.apache.nifi.processors.gcp.util.GoogleUtils.GOOGLE_CLOUD_BIGQU
*/
public abstract class AbstractBigQueryProcessor extends AbstractGCPProcessor<BigQuery, BigQueryOptions> implements VerifiableProcessor {
- static final int BUFFER_SIZE = 65536;
-
private static final List<String> REQUIRED_PERMISSIONS = Collections.singletonList("bigquery.tables.updateData");
+ private static final String DATASET_ATTR = "Dataset";
+ private static final String TABLE_NAME_ATTR = "Table Name";
public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success")
.description("FlowFiles are routed to this relationship after a successful Google BigQuery operation.")
@@ -69,40 +70,35 @@ public abstract class AbstractBigQueryProcessor extends AbstractGCPProcessor<Big
);
public static final PropertyDescriptor DATASET = new PropertyDescriptor.Builder()
- .name(BigQueryAttributes.DATASET_ATTR)
- .displayName("Dataset")
- .description(BigQueryAttributes.DATASET_DESC)
+ .name(DATASET_ATTR)
+ .description("BigQuery dataset name (Note - The dataset must exist
in GCP)")
.required(true)
- .defaultValue("${" + BigQueryAttributes.DATASET_ATTR + "}")
+ .defaultValue("${" + DATASET_ATTR + "}")
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(StandardValidators.NON_EMPTY_EL_VALIDATOR)
.build();
public static final PropertyDescriptor TABLE_NAME = new PropertyDescriptor.Builder()
- .name(BigQueryAttributes.TABLE_NAME_ATTR)
- .displayName("Table Name")
- .description(BigQueryAttributes.TABLE_NAME_DESC)
+ .name(TABLE_NAME_ATTR)
+ .description("BigQuery table name")
.required(true)
- .defaultValue("${" + BigQueryAttributes.TABLE_NAME_ATTR + "}")
+ .defaultValue("${" + TABLE_NAME_ATTR + "}")
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(StandardValidators.NON_EMPTY_EL_VALIDATOR)
.build();
- public static final PropertyDescriptor IGNORE_UNKNOWN = new PropertyDescriptor.Builder()
- .name(BigQueryAttributes.IGNORE_UNKNOWN_ATTR)
- .displayName("Ignore Unknown Values")
- .description(BigQueryAttributes.IGNORE_UNKNOWN_DESC)
- .required(true)
- .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
- .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
- .defaultValue("false")
- .build();
-
@Override
public Set<Relationship> getRelationships() {
return RELATIONSHIPS;
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ super.migrateProperties(config);
+ config.renameProperty("bq.dataset", DATASET.getName());
+ config.renameProperty("bq.table.name", TABLE_NAME.getName());
+ }
+
@Override
protected GoogleCredentials getGoogleCredentials(ProcessContext context) {
return super.getGoogleCredentials(context).createScoped(GOOGLE_CLOUD_BIGQUERY_SCOPE);
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/BigQueryAttributes.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/BigQueryAttributes.java
deleted file mode 100644
index d356327f8a..0000000000
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/BigQueryAttributes.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nifi.processors.gcp.bigquery;
-
-/**
- * Attributes associated with the BigQuery processors
- */
-public class BigQueryAttributes {
- private BigQueryAttributes() {
- }
-
- // Properties
- public static final String IGNORE_UNKNOWN_ATTR = "bq.load.ignore_unknown";
- public static final String IGNORE_UNKNOWN_DESC = "Sets whether BigQuery should allow extra values that are not represented "
- + "in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as "
- + "bad records, and if there are too many bad records, an invalid error is returned in the job result. By default "
- + "unknown values are not allowed.";
-
- public static final String DATASET_ATTR = "bq.dataset";
- public static final String DATASET_DESC = "BigQuery dataset name (Note - The dataset must exist in GCP)";
-
- public static final String TABLE_NAME_ATTR = "bq.table.name";
- public static final String TABLE_NAME_DESC = "BigQuery table name";
-
- public static final String RECORD_READER_ATTR = "bq.record.reader";
- public static final String RECORD_READER_DESC = "Specifies the Controller Service to use for parsing incoming data.";
-
- public static final String SKIP_INVALID_ROWS_ATTR = "bq.skip.invalid.rows";
- public static final String SKIP_INVALID_ROWS_DESC = "Sets whether to insert all valid rows of a request, even if invalid "
- + "rows exist. If not set the entire insert request will fail if it contains an invalid row.";
-
- // Batch Attributes
- public static final String JOB_NB_RECORDS_ATTR = "bq.records.count";
- public static final String JOB_NB_RECORDS_DESC = "Number of records successfully inserted";
-}
\ No newline at end of file
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQuery.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQuery.java
index c801bdc56c..4448c2cf6a 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQuery.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/bigquery/PutBigQuery.java
@@ -59,6 +59,7 @@ import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;
@@ -96,7 +97,7 @@ import java.util.concurrent.atomic.AtomicReference;
"are skipped. Exactly once delivery semantics are achieved via stream
offsets.")
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@WritesAttributes({
- @WritesAttribute(attribute = BigQueryAttributes.JOB_NB_RECORDS_ATTR, description = BigQueryAttributes.JOB_NB_RECORDS_DESC)
+ @WritesAttribute(attribute = PutBigQuery.JOB_NB_RECORDS_ATTR, description = "Number of records successfully inserted")
})
public class PutBigQuery extends AbstractBigQueryProcessor {
@@ -104,11 +105,11 @@ public class PutBigQuery extends AbstractBigQueryProcessor {
static final String BATCH = "BATCH";
static final AllowableValue STREAM_TYPE = new AllowableValue(STREAM, STREAM, "Use streaming record handling strategy");
static final AllowableValue BATCH_TYPE = new AllowableValue(BATCH, BATCH, "Use batching record handling strategy");
+ static final String JOB_NB_RECORDS_ATTR = "bq.records.count";
- private static final String APPEND_RECORD_COUNT_NAME = "bq.append.record.count";
private static final String APPEND_RECORD_COUNT_DESC = "The number of records to be appended to the write stream at once. Applicable for both batch and stream types";
- private static final String TRANSFER_TYPE_NAME = "bq.transfer.type";
private static final String TRANSFER_TYPE_DESC = "Defines the preferred transfer type streaming or batching";
+ private static final String SKIP_INVALID_ROWS_ATTR = "Skip Invalid Rows";
private static final List<Status.Code> RETRYABLE_ERROR_CODES = Arrays.asList(Status.Code.INTERNAL, Status.Code.ABORTED, Status.Code.CANCELLED);
@@ -128,8 +129,7 @@ public class PutBigQuery extends AbstractBigQueryProcessor {
.build();
public static final PropertyDescriptor BIGQUERY_API_ENDPOINT = new PropertyDescriptor.Builder()
- .name("bigquery-api-endpoint")
- .displayName("BigQuery API Endpoint")
+ .name("BigQuery API Endpoint")
.description("Can be used to override the default BigQuery endpoint.
Default is "
+ BigQueryWriteStubSettings.getDefaultEndpoint() + ". "
+ "Format must be hostname:port.")
@@ -140,8 +140,7 @@ public class PutBigQuery extends AbstractBigQueryProcessor {
.build();
static final PropertyDescriptor TRANSFER_TYPE = new PropertyDescriptor.Builder()
- .name(TRANSFER_TYPE_NAME)
- .displayName("Transfer Type")
+ .name("Transfer Type")
.description(TRANSFER_TYPE_DESC)
.required(true)
.defaultValue(STREAM_TYPE.getValue())
@@ -149,8 +148,7 @@ public class PutBigQuery extends AbstractBigQueryProcessor {
.build();
static final PropertyDescriptor APPEND_RECORD_COUNT = new PropertyDescriptor.Builder()
- .name(APPEND_RECORD_COUNT_NAME)
- .displayName("Append Record Count")
+ .name("Append Record Count")
.description(APPEND_RECORD_COUNT_DESC)
.required(true)
.defaultValue("20")
@@ -158,17 +156,16 @@ public class PutBigQuery extends AbstractBigQueryProcessor {
.build();
public static final PropertyDescriptor RECORD_READER = new PropertyDescriptor.Builder()
- .name(BigQueryAttributes.RECORD_READER_ATTR)
- .displayName("Record Reader")
- .description(BigQueryAttributes.RECORD_READER_DESC)
+ .name("Record Reader")
+ .description("Specifies the Controller Service to use for parsing
incoming data.")
.identifiesControllerService(RecordReaderFactory.class)
.required(true)
.build();
public static final PropertyDescriptor SKIP_INVALID_ROWS = new PropertyDescriptor.Builder()
- .name(BigQueryAttributes.SKIP_INVALID_ROWS_ATTR)
- .displayName("Skip Invalid Rows")
- .description(BigQueryAttributes.SKIP_INVALID_ROWS_DESC)
+ .name(SKIP_INVALID_ROWS_ATTR)
+ .description("Sets whether to insert all valid rows of a request, even
if invalid "
+ + "rows exist. If not set the entire insert request will fail
if it contains an invalid row.")
.required(true)
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
@@ -246,7 +243,7 @@ public class PutBigQuery extends AbstractBigQueryProcessor {
RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger())) {
recordNumWritten = writeRecordsToStream(reader, protoDescriptor, skipInvalidRows, tableSchema);
}
- flowFile = session.putAttribute(flowFile, BigQueryAttributes.JOB_NB_RECORDS_ATTR, Integer.toString(recordNumWritten));
+ flowFile = session.putAttribute(flowFile, JOB_NB_RECORDS_ATTR, Integer.toString(recordNumWritten));
} catch (Exception e) {
error.set(e);
} finally {
@@ -254,6 +251,16 @@ public class PutBigQuery extends AbstractBigQueryProcessor {
}
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ super.migrateProperties(config);
+ config.renameProperty("bigquery-api-endpoint",
BIGQUERY_API_ENDPOINT.getName());
+ config.renameProperty("bq.transfer.type", TRANSFER_TYPE.getName());
+ config.renameProperty("bq.append.record.count",
APPEND_RECORD_COUNT.getName());
+ config.renameProperty("bq.record.reader", RECORD_READER.getName());
+ config.renameProperty("bq.skip.invalid.rows",
SKIP_INVALID_ROWS.getName());
+ }
+
private int writeRecordsToStream(RecordReader reader, Descriptors.Descriptor descriptor, boolean skipInvalidRows, TableSchema tableSchema) throws Exception {
Record currentRecord;
int offset = 0;
@@ -321,7 +328,7 @@ public class PutBigQuery extends AbstractBigQueryProcessor {
// Verify that no error occurred in the stream.
if (error.get() != null) {
getLogger().error("Stream processing failed", error.get());
- flowFile = session.putAttribute(flowFile, BigQueryAttributes.JOB_NB_RECORDS_ATTR, isBatch() ? "0" : String.valueOf(appendSuccessCount.get() * recordBatchCount));
+ flowFile = session.putAttribute(flowFile, JOB_NB_RECORDS_ATTR, isBatch() ? "0" : String.valueOf(appendSuccessCount.get() * recordBatchCount));
session.penalize(flowFile);
session.transfer(flowFile, REL_FAILURE);
error.set(null); // set error to null for next execution
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/factory/CredentialPropertyDescriptors.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/factory/CredentialPropertyDescriptors.java
index 774c603656..eadc9af773 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/factory/CredentialPropertyDescriptors.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/factory/CredentialPropertyDescriptors.java
@@ -38,8 +38,7 @@ public final class CredentialPropertyDescriptors {
* </a>
*/
public static final PropertyDescriptor USE_APPLICATION_DEFAULT_CREDENTIALS = new PropertyDescriptor.Builder()
- .name("application-default-credentials")
- .displayName("Use Application Default Credentials")
+ .name("Use Application Default Credentials")
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.required(false)
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
@@ -53,8 +52,7 @@ public final class CredentialPropertyDescriptors {
.build();
public static final PropertyDescriptor USE_COMPUTE_ENGINE_CREDENTIALS = new PropertyDescriptor.Builder()
- .name("compute-engine-credentials")
- .displayName("Use Compute Engine Credentials")
+ .name("Use Compute Engine Credentials")
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.required(false)
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
@@ -73,8 +71,7 @@ public final class CredentialPropertyDescriptors {
* </a>
*/
public static final PropertyDescriptor SERVICE_ACCOUNT_JSON_FILE = new PropertyDescriptor.Builder()
- .name("service-account-json-file")
- .displayName("Service Account JSON File")
+ .name("Service Account JSON File")
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.required(false)
.identifiesExternalResource(ResourceCardinality.SINGLE, ResourceType.FILE)
@@ -82,8 +79,7 @@ public final class CredentialPropertyDescriptors {
.build();
public static final PropertyDescriptor SERVICE_ACCOUNT_JSON = new PropertyDescriptor.Builder()
- .name("service-account-json")
- .displayName("Service Account JSON")
+ .name("Service Account JSON")
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
.required(false)
.addValidator(JsonValidator.INSTANCE)
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/service/GCPCredentialsControllerService.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/service/GCPCredentialsControllerService.java
index 8f6c5f8a4e..fdaf38fd5c 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/service/GCPCredentialsControllerService.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/credentials/service/GCPCredentialsControllerService.java
@@ -34,6 +34,7 @@ import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.controller.VerifiableControllerService;
import org.apache.nifi.gcp.credentials.service.GCPCredentialsService;
import org.apache.nifi.logging.ComponentLog;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processors.gcp.ProxyAwareTransportFactory;
import org.apache.nifi.processors.gcp.credentials.factory.CredentialsFactory;
@@ -133,6 +134,14 @@ public class GCPCredentialsControllerService extends AbstractControllerService i
}
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ config.renameProperty("application-default-credentials",
USE_APPLICATION_DEFAULT_CREDENTIALS.getName());
+ config.renameProperty("compute-engine-credentials",
USE_COMPUTE_ENGINE_CREDENTIALS.getName());
+ config.renameProperty("service-account-json-file",
SERVICE_ACCOUNT_JSON_FILE.getName());
+ config.renameProperty("service-account-json",
SERVICE_ACCOUNT_JSON.getName());
+ }
+
private GoogleCredentials getGoogleCredentials(final ConfigurationContext context) throws IOException {
final ProxyConfiguration proxyConfiguration = ProxyConfiguration.getConfiguration(context);
final HttpTransportFactory transportFactory = new ProxyAwareTransportFactory(proxyConfiguration);
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/FetchGoogleDrive.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/FetchGoogleDrive.java
index 58df4aed9a..d5187bcec7 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/FetchGoogleDrive.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/FetchGoogleDrive.java
@@ -49,6 +49,7 @@ import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
@@ -207,8 +208,7 @@ public class FetchGoogleDrive extends AbstractProcessor implements GoogleDriveTr
public static final PropertyDescriptor FILE_ID = new PropertyDescriptor.Builder()
- .name("drive-file-id")
- .displayName("File ID")
+ .name("File ID")
.description("The Drive ID of the File to fetch. Please see
Additional Details for information on how to obtain the Drive ID.")
.required(true)
.defaultValue("${drive.id}")
@@ -363,6 +363,14 @@ public class FetchGoogleDrive extends AbstractProcessor implements GoogleDriveTr
}
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ config.renameProperty("drive-file-id", FILE_ID.getName());
+ config.renameProperty(OLD_CONNECT_TIMEOUT_PROPERTY_NAME, CONNECT_TIMEOUT.getName());
+ config.renameProperty(OLD_READ_TIMEOUT_PROPERTY_NAME, READ_TIMEOUT.getName());
+ config.renameProperty(GoogleUtils.OLD_GCP_CREDENTIALS_PROVIDER_SERVICE_PROPERTY_NAME, GoogleUtils.GCP_CREDENTIALS_PROVIDER_SERVICE.getName());
+ }
+
private String getExportType(final String mimeType, final ProcessContext context) {
if (mimeType == null) {
return null;
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/GoogleDriveTrait.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/GoogleDriveTrait.java
index d8f030feed..1a5f787f87 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/GoogleDriveTrait.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/GoogleDriveTrait.java
@@ -48,12 +48,13 @@ public interface GoogleDriveTrait {
String DRIVE_SHORTCUT_MIME_TYPE = "application/vnd.google-apps.shortcut";
String DRIVE_URL = "https://drive.google.com/open?id=";
String APPLICATION_NAME = "NiFi";
+ String OLD_CONNECT_TIMEOUT_PROPERTY_NAME = "connect-timeout";
+ String OLD_READ_TIMEOUT_PROPERTY_NAME = "read-timeout";
JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance();
PropertyDescriptor CONNECT_TIMEOUT = new PropertyDescriptor.Builder()
- .name("connect-timeout")
- .displayName("Connect Timeout")
+ .name("Connect Timeout")
.description("Maximum wait time for connection to Google Drive
service.")
.required(true)
.defaultValue("20 sec")
@@ -62,8 +63,7 @@ public interface GoogleDriveTrait {
.build();
PropertyDescriptor READ_TIMEOUT = new PropertyDescriptor.Builder()
- .name("read-timeout")
- .displayName("Read Timeout")
+ .name("Read Timeout")
.description("Maximum wait time for response from Google Drive
service.")
.required(true)
.defaultValue("60 sec")
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/ListGoogleDrive.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/ListGoogleDrive.java
index ea2d5e98eb..140d77e861 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/ListGoogleDrive.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/ListGoogleDrive.java
@@ -140,8 +140,7 @@ import static org.apache.nifi.processors.gcp.drive.GoogleDriveAttributes.WEB_VIE
@DefaultSchedule(strategy = SchedulingStrategy.TIMER_DRIVEN, period = "1 min")
public class ListGoogleDrive extends AbstractListProcessor<GoogleDriveFileInfo> implements GoogleDriveTrait {
public static final PropertyDescriptor FOLDER_ID = new PropertyDescriptor.Builder()
- .name("folder-id")
- .displayName("Folder ID")
+ .name("Folder ID")
.description("The ID of the folder from which to pull list of
files." +
" Please see Additional Details to set up access to Google
Drive and obtain Folder ID." +
" WARNING: Unauthorized access to the folder is treated as
if the folder was empty." +
@@ -152,8 +151,7 @@ public class ListGoogleDrive extends AbstractListProcessor<GoogleDriveFileInfo>
.build();
public static final PropertyDescriptor RECURSIVE_SEARCH = new PropertyDescriptor.Builder()
- .name("recursive-search")
- .displayName("Search Recursively")
+ .name("Search Recursively")
.description("When 'true', will include list of files from
concrete sub-folders (ignores shortcuts)." +
" Otherwise, will return only files that have the defined
'Folder ID' as their parent directly." +
" WARNING: The listing may fail if there are too many
sub-folders (500+).")
@@ -163,8 +161,7 @@ public class ListGoogleDrive extends AbstractListProcessor<GoogleDriveFileInfo>
.build();
public static final PropertyDescriptor MIN_AGE = new PropertyDescriptor.Builder()
- .name("min-age")
- .displayName("Minimum File Age")
+ .name("Minimum File Age")
.description("The minimum age a file must be in order to be
considered; any files younger than this will be ignored.")
.required(true)
.addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
@@ -240,6 +237,12 @@ public class ListGoogleDrive extends AbstractListProcessor<GoogleDriveFileInfo>
config.renameProperty(ListedEntityTracker.OLD_TRACKING_STATE_CACHE_PROPERTY_NAME, TRACKING_STATE_CACHE.getName());
config.renameProperty(ListedEntityTracker.OLD_TRACKING_TIME_WINDOW_PROPERTY_NAME, TRACKING_TIME_WINDOW.getName());
config.renameProperty(ListedEntityTracker.OLD_INITIAL_LISTING_TARGET_PROPERTY_NAME, INITIAL_LISTING_TARGET.getName());
+ config.renameProperty(OLD_CONNECT_TIMEOUT_PROPERTY_NAME, CONNECT_TIMEOUT.getName());
+ config.renameProperty(OLD_READ_TIMEOUT_PROPERTY_NAME, READ_TIMEOUT.getName());
+ config.renameProperty("folder-id", FOLDER_ID.getName());
+ config.renameProperty("recursive-search", RECURSIVE_SEARCH.getName());
+ config.renameProperty("min-age", MIN_AGE.getName());
+ config.renameProperty(GoogleUtils.OLD_GCP_CREDENTIALS_PROVIDER_SERVICE_PROPERTY_NAME, GoogleUtils.GCP_CREDENTIALS_PROVIDER_SERVICE.getName());
}
@Override
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/PutGoogleDrive.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/PutGoogleDrive.java
index 8cd42c25cb..92173d47be 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/PutGoogleDrive.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/drive/PutGoogleDrive.java
@@ -45,6 +45,7 @@ import org.apache.nifi.components.Validator;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
@@ -54,6 +55,7 @@ import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.conflict.resolution.ConflictResolutionStrategy;
import org.apache.nifi.processors.gcp.ProxyAwareTransportFactory;
+import org.apache.nifi.processors.gcp.util.GoogleUtils;
import org.apache.nifi.proxy.ProxyConfiguration;
import org.json.JSONObject;
@@ -120,8 +122,7 @@ public class PutGoogleDrive extends AbstractProcessor implements GoogleDriveTrai
public static final int MAX_ALLOWED_CHUNK_SIZE_IN_BYTES = 1024 * 1024 * 1024;
public static final PropertyDescriptor FOLDER_ID = new PropertyDescriptor.Builder()
- .name("folder-id")
- .displayName("Folder ID")
+ .name("Folder ID")
.description("The ID of the shared folder." +
" Please see Additional Details to set up access to Google
Drive and obtain Folder ID.")
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
@@ -130,8 +131,7 @@ public class PutGoogleDrive extends AbstractProcessor implements GoogleDriveTrai
.build();
public static final PropertyDescriptor FILE_NAME = new PropertyDescriptor.Builder()
- .name("file-name")
- .displayName("Filename")
+ .name("Filename")
.description("The name of the file to upload to the specified
Google Drive folder.")
.required(true)
.defaultValue("${filename}")
@@ -140,8 +140,7 @@ public class PutGoogleDrive extends AbstractProcessor implements GoogleDriveTrai
.build();
public static final PropertyDescriptor CONFLICT_RESOLUTION = new PropertyDescriptor.Builder()
- .name("conflict-resolution-strategy")
- .displayName("Conflict Resolution Strategy")
+ .name("Conflict Resolution Strategy")
.description("Indicates what should happen when a file with the
same name already exists in the specified Google Drive folder.")
.required(true)
.defaultValue(FAIL.getValue())
@@ -149,8 +148,7 @@ public class PutGoogleDrive extends AbstractProcessor implements GoogleDriveTrai
.build();
public static final PropertyDescriptor CHUNKED_UPLOAD_SIZE = new PropertyDescriptor.Builder()
- .name("chunked-upload-size")
- .displayName("Chunked Upload Size")
+ .name("Chunked Upload Size")
.description("Defines the size of a chunk. Used when a FlowFile's
size exceeds 'Chunked Upload Threshold' and content is uploaded in smaller
chunks. "
+ "Minimum allowed chunk size is 256 KB, maximum allowed
chunk size is 1 GB.")
.addValidator(createChunkSizeValidator())
@@ -159,8 +157,7 @@ public class PutGoogleDrive extends AbstractProcessor implements GoogleDriveTrai
.build();
public static final PropertyDescriptor CHUNKED_UPLOAD_THRESHOLD = new PropertyDescriptor.Builder()
- .name("chunked-upload-threshold")
- .displayName("Chunked Upload Threshold")
+ .name("Chunked Upload Threshold")
.description("The maximum size of the content which is uploaded at
once. FlowFiles larger than this threshold are uploaded in chunks.")
.defaultValue("100 MB")
.addValidator(DATA_SIZE_VALIDATOR)
@@ -322,6 +319,19 @@ public class PutGoogleDrive extends AbstractProcessor implements GoogleDriveTrai
driveService = createDriveService(context, httpTransport, DriveScopes.DRIVE, DriveScopes.DRIVE_METADATA);
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ config.renameProperty(OLD_CONNECT_TIMEOUT_PROPERTY_NAME, CONNECT_TIMEOUT.getName());
+ config.renameProperty(OLD_READ_TIMEOUT_PROPERTY_NAME, READ_TIMEOUT.getName());
+ config.renameProperty("folder-id", FOLDER_ID.getName());
+ config.renameProperty("file-name", FILE_NAME.getName());
+ config.renameProperty("conflict-resolution-strategy",
CONFLICT_RESOLUTION.getName());
+ config.renameProperty("chunked-upload-size",
CHUNKED_UPLOAD_SIZE.getName());
+ config.renameProperty("chunked-upload-threshold",
CHUNKED_UPLOAD_THRESHOLD.getName());
+
config.renameProperty(GoogleUtils.OLD_GCP_CREDENTIALS_PROVIDER_SERVICE_PROPERTY_NAME,
GCP_CREDENTIALS_PROVIDER_SERVICE.getName());
+
+ }
+
private FlowFile addAttributes(File file, FlowFile flowFile, ProcessSession session) {
final Map<String, String> attributes = new HashMap<>();
attributes.put(ID, file.getId());
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/AbstractGCPubSubProcessor.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/AbstractGCPubSubProcessor.java
index 27ddb800a1..6b8cf352ed 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/AbstractGCPubSubProcessor.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/AbstractGCPubSubProcessor.java
@@ -28,6 +28,7 @@ import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.VerifiableProcessor;
@@ -48,8 +49,7 @@ import static org.apache.nifi.processors.gcp.util.GoogleUtils.GOOGLE_CLOUD_PUBSU
public abstract class AbstractGCPubSubProcessor extends AbstractGCPProcessor implements VerifiableProcessor {
public static final PropertyDescriptor BATCH_SIZE_THRESHOLD = new PropertyDescriptor.Builder()
- .name("gcp-pubsub-publish-batch-size")
- .displayName("Batch Size Threshold")
+ .name("Batch Size Threshold")
.description("Indicates the number of messages the cloud service
should bundle together in a batch. If not set and left empty, only one message
" +
"will be used in a batch")
.required(true)
@@ -58,8 +58,7 @@ public abstract class AbstractGCPubSubProcessor extends AbstractGCPProcessor imp
.build();
public static final PropertyDescriptor BATCH_BYTES_THRESHOLD = new PropertyDescriptor.Builder()
- .name("gcp-batch-bytes")
- .displayName("Batch Bytes Threshold")
+ .name("Batch Bytes Threshold")
.description("Publish request gets triggered based on this Batch
Bytes Threshold property and"
+ " the " + BATCH_SIZE_THRESHOLD.getDisplayName() + "
property, whichever condition is met first.")
.required(true)
@@ -69,8 +68,7 @@ public abstract class AbstractGCPubSubProcessor extends AbstractGCPProcessor imp
.build();
public static final PropertyDescriptor BATCH_DELAY_THRESHOLD = new PropertyDescriptor.Builder()
- .name("gcp-pubsub-publish-batch-delay")
- .displayName("Batch Delay Threshold")
+ .name("Batch Delay Threshold")
.description("Indicates the delay threshold to use for batching.
After this amount of time has elapsed " +
"(counting from the first element added), the elements
will be wrapped up in a batch and sent. " +
"This value should not be set too high, usually on the
order of milliseconds. Otherwise, calls " +
@@ -80,9 +78,8 @@ public abstract class AbstractGCPubSubProcessor extends AbstractGCPProcessor imp
.addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
.build();
- public static final PropertyDescriptor API_ENDPOINT = new PropertyDescriptor
- .Builder().name("api-endpoint")
- .displayName("API Endpoint")
+ public static final PropertyDescriptor API_ENDPOINT = new PropertyDescriptor.Builder()
+ .name("API Endpoint")
.description("Override the gRPC endpoint in the form of
[host:port]")
.addValidator(StandardValidators.HOSTNAME_PORT_LIST_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
@@ -110,6 +107,15 @@ public abstract class AbstractGCPubSubProcessor extends AbstractGCPProcessor imp
return RELATIONSHIPS;
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ super.migrateProperties(config);
+ config.renameProperty("gcp-pubsub-publish-batch-size",
BATCH_SIZE_THRESHOLD.getName());
+ config.renameProperty("gcp-batch-bytes",
BATCH_BYTES_THRESHOLD.getName());
+ config.renameProperty("gcp-pubsub-publish-batch-delay",
BATCH_DELAY_THRESHOLD.getName());
+ config.renameProperty("api-endpoint", API_ENDPOINT.getName());
+ }
+
@Override
protected ServiceOptions getServiceOptions(ProcessContext context, GoogleCredentials credentials) {
return null;
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/ConsumeGCPubSub.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/ConsumeGCPubSub.java
index 87cd35e75d..5e118fe617 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/ConsumeGCPubSub.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/ConsumeGCPubSub.java
@@ -43,6 +43,7 @@ import org.apache.nifi.components.Validator;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.logging.ComponentLog;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
@@ -112,8 +113,7 @@ public class ConsumeGCPubSub extends AbstractGCPubSubProcessor {
private static final List<String> REQUIRED_PERMISSIONS = Collections.singletonList("pubsub.subscriptions.consume");
public static final PropertyDescriptor SUBSCRIPTION = new PropertyDescriptor.Builder()
- .name("gcp-pubsub-subscription")
- .displayName("Subscription")
+ .name("Subscription")
.addValidator(StandardValidators.NON_EMPTY_EL_VALIDATOR)
.description("Name of the Google Cloud Pub/Sub Subscription")
.required(true)
@@ -335,8 +335,14 @@ public class ConsumeGCPubSub extends AbstractGCPubSubProcessor {
session.commitAsync(() -> acknowledgeAcks(ackIds, subscriptionName));
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ super.migrateProperties(config);
+ config.renameProperty("gcp-pubsub-subscription",
SUBSCRIPTION.getName());
+ }
+
private void processInputDemarcator(final ProcessSession session, final List<ReceivedMessage> receivedMessages, final String subscriptionName,
- final List<String> ackIds) {
+ final List<String> ackIds) {
final byte[] demarcator = demarcatorValue == null ? new byte[0] : demarcatorValue.getBytes(StandardCharsets.UTF_8);
FlowFile flowFile = session.create();
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/PublishGCPubSub.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/PublishGCPubSub.java
index 02f7cb633b..83bb3c6ee9 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/PublishGCPubSub.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/PublishGCPubSub.java
@@ -51,6 +51,7 @@ import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.logging.ComponentLog;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
@@ -155,8 +156,7 @@ public class PublishGCPubSub extends AbstractGCPubSubProcessor {
.build();
public static final PropertyDescriptor TOPIC_NAME = new PropertyDescriptor.Builder()
- .name("gcp-pubsub-topic")
- .displayName("Topic Name")
+ .name("Topic Name")
.description("Name of the Google Cloud PubSub Topic")
.required(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
@@ -309,6 +309,12 @@ public class PublishGCPubSub extends AbstractGCPubSubProcessor {
}
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ super.migrateProperties(config);
+ config.renameProperty("gcp-pubsub-topic", TOPIC_NAME.getName());
+ }
+
private void onTriggerFlowFileStrategy(
final ProcessContext context,
final ProcessSession session,
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/AbstractGCSProcessor.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/AbstractGCSProcessor.java
index 35025b836b..e5a8f1eaaf 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/AbstractGCSProcessor.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/AbstractGCSProcessor.java
@@ -27,6 +27,7 @@ import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.logging.ComponentLog;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.VerifiableProcessor;
@@ -68,9 +69,8 @@ public abstract class AbstractGCSProcessor extends AbstractGCPProcessor<Storage,
}
// https://cloud.google.com/storage/docs/request-endpoints#storage-set-client-endpoint-java
- public static final PropertyDescriptor STORAGE_API_URL = new PropertyDescriptor
- .Builder().name("storage-api-url")
- .displayName("Storage API URL")
+ public static final PropertyDescriptor STORAGE_API_URL = new PropertyDescriptor.Builder()
+ .name("Storage API URL")
.description("Overrides the default storage URL. Configuring an
alternative Storage API URL also overrides the "
+ "HTTP Host header on requests as described in the Google
documentation for Private Service Connections.")
.addValidator(StandardValidators.URL_VALIDATOR)
@@ -111,6 +111,12 @@ public abstract class AbstractGCSProcessor extends AbstractGCPProcessor<Storage,
return results;
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ super.migrateProperties(config);
+ config.renameProperty("storage-api-url", STORAGE_API_URL.getName());
+ }
+
@Override
protected final Collection<ValidationResult> customValidate(ValidationContext validationContext) {
final Collection<ValidationResult> results = super.customValidate(validationContext);
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/DeleteGCSObject.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/DeleteGCSObject.java
index 5afb7594ff..61dc11f5ac 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/DeleteGCSObject.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/DeleteGCSObject.java
@@ -26,6 +26,7 @@ import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;
@@ -47,9 +48,8 @@ import static org.apache.nifi.processors.gcp.storage.StorageAttributes.KEY_DESC;
@SeeAlso({PutGCSObject.class, FetchGCSObject.class, ListGCSBucket.class})
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
public class DeleteGCSObject extends AbstractGCSProcessor {
- public static final PropertyDescriptor BUCKET = new PropertyDescriptor
- .Builder().name("gcs-bucket")
- .displayName("Bucket")
+ public static final PropertyDescriptor BUCKET = new PropertyDescriptor.Builder()
+ .name("Bucket")
.description(BUCKET_DESC)
.required(true)
.defaultValue("${" + BUCKET_ATTR + "}")
@@ -57,9 +57,8 @@ public class DeleteGCSObject extends AbstractGCSProcessor {
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
- public static final PropertyDescriptor KEY = new PropertyDescriptor
- .Builder().name("gcs-key")
- .displayName("Key")
+ public static final PropertyDescriptor KEY = new PropertyDescriptor.Builder()
+ .name("Key")
.description(KEY_DESC)
.required(true)
.defaultValue("${filename}")
@@ -68,8 +67,7 @@ public class DeleteGCSObject extends AbstractGCSProcessor {
.build();
public static final PropertyDescriptor GENERATION = new PropertyDescriptor.Builder()
- .name("gcs-generation")
- .displayName("Generation")
+ .name("Generation")
.description("The generation of the object to be deleted. If null,
will use latest version of the object.")
.addValidator(StandardValidators.POSITIVE_LONG_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
@@ -134,4 +132,12 @@ public class DeleteGCSObject extends AbstractGCSProcessor {
final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
getLogger().info("Successfully deleted GCS Object for {} in {} millis; routing to success", flowFile, millis);
}
+
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ super.migrateProperties(config);
+ config.renameProperty("gcs-bucket", BUCKET.getName());
+ config.renameProperty("gcs-key", KEY.getName());
+ config.renameProperty("gcs-generation", GENERATION.getName());
+ }
}
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/FetchGCSObject.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/FetchGCSObject.java
index 6e877d6be9..e900e6d1ce 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/FetchGCSObject.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/FetchGCSObject.java
@@ -49,6 +49,7 @@ import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.logging.ComponentLog;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
@@ -167,9 +168,8 @@ import static org.apache.nifi.processors.gcp.storage.StorageAttributes.URI_DESC;
}
)
public class FetchGCSObject extends AbstractGCSProcessor {
- public static final PropertyDescriptor BUCKET = new PropertyDescriptor
- .Builder().name("gcs-bucket")
- .displayName("Bucket")
+ public static final PropertyDescriptor BUCKET = new PropertyDescriptor.Builder()
+ .name("Bucket")
.description(BUCKET_DESC)
.required(true)
.defaultValue("${" + BUCKET_ATTR + "}")
@@ -177,9 +177,8 @@ public class FetchGCSObject extends AbstractGCSProcessor {
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
- public static final PropertyDescriptor KEY = new PropertyDescriptor
- .Builder().name("gcs-key")
- .displayName("Key")
+ public static final PropertyDescriptor KEY = new PropertyDescriptor.Builder()
+ .name("Key")
.description(KEY_DESC)
.required(true)
.defaultValue("${" + CoreAttributes.FILENAME.key() + "}")
@@ -188,8 +187,7 @@ public class FetchGCSObject extends AbstractGCSProcessor {
.build();
public static final PropertyDescriptor GENERATION = new PropertyDescriptor.Builder()
- .name("gcs-generation")
- .displayName("Object Generation")
+ .name("Object Generation")
.description("The generation of the Object to download. If not
set, the latest generation will be downloaded.")
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.addValidator(StandardValidators.POSITIVE_LONG_VALIDATOR)
@@ -197,8 +195,7 @@ public class FetchGCSObject extends AbstractGCSProcessor {
.build();
public static final PropertyDescriptor ENCRYPTION_KEY = new PropertyDescriptor.Builder()
- .name("gcs-server-side-encryption-key")
- .displayName("Server Side Encryption Key")
+ .name("Server Side Encryption Key")
.description("An AES256 Key (encoded in base64) which the object
has been encrypted in.")
.required(false)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
@@ -207,8 +204,7 @@ public class FetchGCSObject extends AbstractGCSProcessor {
.build();
public static final PropertyDescriptor RANGE_START = new PropertyDescriptor.Builder()
- .name("gcs-object-range-start")
- .displayName("Range Start")
+ .name("Range Start")
.description("The byte position at which to start reading from the
object. An empty value or a value of " +
"zero will start reading at the beginning of the object.")
.addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
@@ -217,8 +213,7 @@ public class FetchGCSObject extends AbstractGCSProcessor {
.build();
public static final PropertyDescriptor RANGE_LENGTH = new PropertyDescriptor.Builder()
- .name("gcs-object-range-length")
- .displayName("Range Length")
+ .name("Range Length")
.description("The number of bytes to download from the object,
starting from the Range Start. An empty " +
"value or a value that extends beyond the end of the
object will read to the end of the object.")
.addValidator(StandardValidators.createDataSizeBoundsValidator(1,
Long.MAX_VALUE))
@@ -318,6 +313,17 @@ public class FetchGCSObject extends AbstractGCSProcessor {
session.getProvenanceReporter().fetch(flowFile, transitUri, millis);
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ super.migrateProperties(config);
+ config.renameProperty("gcs-bucket", BUCKET.getName());
+ config.renameProperty("gcs-key", KEY.getName());
+ config.renameProperty("gcs-generation", GENERATION.getName());
+ config.renameProperty("gcs-server-side-encryption-key",
ENCRYPTION_KEY.getName());
+ config.renameProperty("gcs-object-range-start", RANGE_START.getName());
+ config.renameProperty("gcs-object-range-length",
RANGE_LENGTH.getName());
+ }
+
private FetchedBlob fetchBlob(final ProcessContext context, final Storage storage, final Map<String, String> attributes) throws IOException {
final String bucketName = context.getProperty(BUCKET).evaluateAttributeExpressions(attributes).getValue();
final String key = context.getProperty(KEY).evaluateAttributeExpressions(attributes).getValue();
diff --git a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/ListGCSBucket.java b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/ListGCSBucket.java
index 2ca745bc02..3f0ba5386b 100644
--- a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/ListGCSBucket.java
+++ b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/ListGCSBucket.java
@@ -198,8 +198,7 @@ public class ListGCSBucket extends AbstractGCSProcessor {
" Any property that relates to the persisting state will
be ignored.");
public static final PropertyDescriptor LISTING_STRATEGY = new PropertyDescriptor.Builder()
- .name("listing-strategy")
- .displayName("Listing Strategy")
+ .name("Listing Strategy")
.description("Specify how to determine new/updated entities. See each
strategy descriptions for detail.")
.required(true)
.allowableValues(BY_TIMESTAMPS, BY_ENTITIES, NO_TRACKING)
@@ -222,9 +221,8 @@ public class ListGCSBucket extends AbstractGCSProcessor {
.required(true)
.build();
- public static final PropertyDescriptor BUCKET = new PropertyDescriptor
- .Builder().name("gcs-bucket")
- .displayName("Bucket")
+ public static final PropertyDescriptor BUCKET = new PropertyDescriptor.Builder()
+ .name("Bucket")
.description(BUCKET_DESC)
.required(true)
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
@@ -232,8 +230,7 @@ public class ListGCSBucket extends AbstractGCSProcessor {
.build();
public static final PropertyDescriptor PREFIX = new PropertyDescriptor.Builder()
- .name("gcs-prefix")
- .displayName("Prefix")
+ .name("Prefix")
.description("The prefix used to filter the object list. In most
cases, it should end with a forward slash ('/').")
.required(false)
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
@@ -241,8 +238,7 @@ public class ListGCSBucket extends AbstractGCSProcessor {
.build();
public static final PropertyDescriptor USE_GENERATIONS = new PropertyDescriptor.Builder()
- .name("gcs-use-generations")
- .displayName("Use Generations")
+ .name("Use Generations")
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.required(true)
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
@@ -252,8 +248,7 @@ public class ListGCSBucket extends AbstractGCSProcessor {
.build();
public static final PropertyDescriptor RECORD_WRITER = new PropertyDescriptor.Builder()
- .name("record-writer")
- .displayName("Record Writer")
+ .name("Record Writer")
.description("Specifies the Record Writer to use for creating the
listing. If not specified, one FlowFile will be created for each entity that is
listed. If the Record Writer is specified, " +
"all entities will be written to a single FlowFile instead of
adding attributes to individual FlowFiles.")
.required(false)
@@ -354,6 +349,11 @@ public class ListGCSBucket extends AbstractGCSProcessor {
config.renameProperty(ListedEntityTracker.OLD_TRACKING_STATE_CACHE_PROPERTY_NAME, TRACKING_STATE_CACHE.getName());
config.renameProperty(ListedEntityTracker.OLD_TRACKING_TIME_WINDOW_PROPERTY_NAME, TRACKING_TIME_WINDOW.getName());
config.renameProperty(ListedEntityTracker.OLD_INITIAL_LISTING_TARGET_PROPERTY_NAME, INITIAL_LISTING_TARGET.getName());
+ config.renameProperty("listing-strategy", LISTING_STRATEGY.getName());
+ config.renameProperty("gcs-bucket", BUCKET.getName());
+ config.renameProperty("gcs-prefix", PREFIX.getName());
+ config.renameProperty("gcs-use-generations",
USE_GENERATIONS.getName());
+ config.renameProperty("record-writer", RECORD_WRITER.getName());
}
protected ListedEntityTracker<ListableBlob> createListedEntityTracker() {
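For reviewers unfamiliar with the migration API, the pattern repeated throughout this
commit collapses each descriptor's machine-readable .name("some-key") plus
human-readable .displayName("Some Name") pair into a single .name("Some Name"), then
registers the retired key in migrateProperties so flows saved under the old key are
renamed when the component loads its configuration. A condensed sketch of the idiom;
the "example-key" descriptor is illustrative and not part of this commit:

    // Before: machine-readable key plus a separate display name
    public static final PropertyDescriptor EXAMPLE = new PropertyDescriptor.Builder()
            .name("example-key")
            .displayName("Example Property")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    // After: the former display name becomes the canonical name
    public static final PropertyDescriptor EXAMPLE = new PropertyDescriptor.Builder()
            .name("Example Property")
            .required(true)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    // The retired key is then mapped forward during configuration migration
    @Override
    public void migrateProperties(PropertyConfiguration config) {
        config.renameProperty("example-key", EXAMPLE.getName());
    }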
diff --git
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java
index acc5569dcc..7c59e43dd4 100644
---
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java
+++
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java
@@ -37,6 +37,7 @@ import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.fileresource.service.api.FileResource;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;
@@ -147,8 +148,7 @@ import static
org.apache.nifi.processors.transfer.ResourceTransferUtils.getFileR
})
public class PutGCSObject extends AbstractGCSProcessor {
public static final PropertyDescriptor BUCKET = new
PropertyDescriptor.Builder()
- .name("gcs-bucket")
- .displayName("Bucket")
+ .name("Bucket")
.description(BUCKET_DESC)
.required(true)
.defaultValue("${" + BUCKET_ATTR + "}")
@@ -157,8 +157,7 @@ public class PutGCSObject extends AbstractGCSProcessor {
.build();
public static final PropertyDescriptor KEY = new
PropertyDescriptor.Builder()
- .name("gcs-key")
- .displayName("Key")
+ .name("Key")
.description(KEY_DESC)
.required(true)
.defaultValue("${filename}")
@@ -167,8 +166,7 @@ public class PutGCSObject extends AbstractGCSProcessor {
.build();
public static final PropertyDescriptor CONTENT_TYPE = new
PropertyDescriptor.Builder()
- .name("gcs-content-type")
- .displayName("Content Type")
+ .name("Content Type")
.description("Content Type for the file, i.e. text/plain")
.defaultValue("${mime.type}")
.required(false)
@@ -178,8 +176,7 @@ public class PutGCSObject extends AbstractGCSProcessor {
public static final PropertyDescriptor CRC32C = new
PropertyDescriptor.Builder()
- .name("gcs-object-crc32c")
- .displayName("CRC32C Checksum")
+ .name("CRC32C Checksum")
.description("CRC32C Checksum (encoded in Base64, big-Endian order) of
the file for server-side validation.")
.required(false)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
@@ -187,8 +184,7 @@ public class PutGCSObject extends AbstractGCSProcessor {
.build();
public static final PropertyDescriptor GZIPCONTENT = new
PropertyDescriptor.Builder()
- .name("gzip.content.enabled")
- .displayName("GZIP Compression Enabled")
+ .name("GZIP Compression Enabled")
.description("Signals to the GCS Blob Writer whether GZIP compression
during transfer is desired. " +
"False means do not gzip and can boost performance in many cases.")
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
@@ -236,8 +232,7 @@ public class PutGCSObject extends AbstractGCSProcessor {
);
public static final PropertyDescriptor ACL = new
PropertyDescriptor.Builder()
- .name("gcs-object-acl")
- .displayName("Object ACL")
+ .name("Object ACL")
.description("Access Control to be attached to the object
uploaded. Not providing this will revert to bucket defaults.")
.required(false)
.allowableValues(
@@ -251,8 +246,7 @@ public class PutGCSObject extends AbstractGCSProcessor {
.build();
public static final PropertyDescriptor ENCRYPTION_KEY = new
PropertyDescriptor.Builder()
- .name("gcs-server-side-encryption-key")
- .displayName("Server Side Encryption Key")
+ .name("Server Side Encryption Key")
.description("An AES256 Encryption Key (encoded in base64) for
server-side encryption of the object.")
.required(false)
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
@@ -262,8 +256,7 @@ public class PutGCSObject extends AbstractGCSProcessor {
public static final PropertyDescriptor OVERWRITE = new
PropertyDescriptor.Builder()
- .name("gcs-overwrite-object")
- .displayName("Overwrite Object")
+ .name("Overwrite Object")
.description("If false, the upload to GCS will succeed only if the
object does not exist.")
.required(true)
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
@@ -281,8 +274,7 @@ public class PutGCSObject extends AbstractGCSProcessor {
);
public static final PropertyDescriptor CONTENT_DISPOSITION_TYPE = new
PropertyDescriptor.Builder()
- .name("gcs-content-disposition-type")
- .displayName("Content Disposition Type")
+ .name("Content Disposition Type")
.description("Type of RFC-6266 Content Disposition to be attached
to the object")
.required(false)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
@@ -313,6 +305,20 @@ public class PutGCSObject extends AbstractGCSProcessor {
return DESCRIPTORS;
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ super.migrateProperties(config);
+ config.renameProperty("gcs-bucket", BUCKET.getName());
+ config.renameProperty("gcs-key", KEY.getName());
+ config.renameProperty("gcs-content-type", CONTENT_TYPE.getName());
+ config.renameProperty("gcs-object-crc32c", CRC32C.getName());
+ config.renameProperty("gzip.content.enabled", GZIPCONTENT.getName());
+ config.renameProperty("gcs-object-acl", ACL.getName());
+ config.renameProperty("gcs-server-side-encryption-key",
ENCRYPTION_KEY.getName());
+ config.renameProperty("gcs-overwrite-object", OVERWRITE.getName());
+ config.renameProperty("gcs-content-disposition-type",
CONTENT_DISPOSITION_TYPE.getName());
+ }
+
@Override
protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final
String propertyDescriptorName) {
return new PropertyDescriptor.Builder()
diff --git
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/util/GoogleUtils.java
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/util/GoogleUtils.java
index 219c909fca..e9198244da 100644
---
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/util/GoogleUtils.java
+++
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/util/GoogleUtils.java
@@ -24,13 +24,13 @@ public class GoogleUtils {
public static final String GOOGLE_CLOUD_PLATFORM_SCOPE =
"https://www.googleapis.com/auth/cloud-platform";
public static final String GOOGLE_CLOUD_PUBSUB_SCOPE =
"https://www.googleapis.com/auth/pubsub";
public static final String GOOGLE_CLOUD_BIGQUERY_SCOPE =
"https://www.googleapis.com/auth/bigquery";
+ public static final String
OLD_GCP_CREDENTIALS_PROVIDER_SERVICE_PROPERTY_NAME =
"gcp-credentials-provider-service";
/**
* Links to the {@link GCPCredentialsService} which provides credentials
for this particular processor.
*/
public static final PropertyDescriptor GCP_CREDENTIALS_PROVIDER_SERVICE =
new PropertyDescriptor.Builder()
- .name("gcp-credentials-provider-service")
- .displayName("GCP Credentials Provider Service")
+ .name("GCP Credentials Provider Service")
.description("The Controller Service used to obtain Google Cloud
Platform credentials.")
.required(true)
.identifiesControllerService(GCPCredentialsService.class)
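The old key is kept here as a public constant rather than an inline literal because
GCP_CREDENTIALS_PROVIDER_SERVICE is shared across many components; each consumer can
then migrate the same key without repeating the string. A sketch of the consuming
side, mirroring the AbstractGcpVisionProcessor change below:

    @Override
    public void migrateProperties(PropertyConfiguration config) {
        config.renameProperty(
                GoogleUtils.OLD_GCP_CREDENTIALS_PROVIDER_SERVICE_PROPERTY_NAME,
                GoogleUtils.GCP_CREDENTIALS_PROVIDER_SERVICE.getName());
    }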
diff --git
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/AbstractGcpVisionProcessor.java
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/AbstractGcpVisionProcessor.java
index 851726b0a1..0b87938447 100644
---
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/AbstractGcpVisionProcessor.java
+++
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/AbstractGcpVisionProcessor.java
@@ -29,10 +29,12 @@ import java.util.Set;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.gcp.credentials.service.GCPCredentialsService;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processors.gcp.util.GoogleUtils;
public abstract class AbstractGcpVisionProcessor extends AbstractProcessor {
public static final String GCP_OPERATION_KEY = "operationKey";
@@ -82,6 +84,11 @@ public abstract class AbstractGcpVisionProcessor extends
AbstractProcessor {
}
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+
config.renameProperty(GoogleUtils.OLD_GCP_CREDENTIALS_PROVIDER_SERVICE_PROPERTY_NAME,
GCP_CREDENTIALS_PROVIDER_SERVICE.getName());
+ }
+
protected ImageAnnotatorClient getVisionClient() {
return this.vision;
}
diff --git
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/AbstractGetGcpVisionAnnotateOperationStatus.java
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/AbstractGetGcpVisionAnnotateOperationStatus.java
index 2a514bfaf0..5a33f74dc8 100644
---
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/AbstractGetGcpVisionAnnotateOperationStatus.java
+++
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/AbstractGetGcpVisionAnnotateOperationStatus.java
@@ -32,6 +32,7 @@ import java.util.stream.Stream;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
@@ -40,8 +41,7 @@ import org.apache.nifi.processor.util.StandardValidators;
abstract public class AbstractGetGcpVisionAnnotateOperationStatus extends
AbstractGcpVisionProcessor {
public static final PropertyDescriptor OPERATION_KEY = new
PropertyDescriptor.Builder()
- .name("operationKey")
- .displayName("GCP Operation Key")
+ .name("GCP Operation Key")
.description("The unique identifier of the Vision operation.")
.defaultValue("${operationKey}")
.required(true)
@@ -109,5 +109,11 @@ abstract public class
AbstractGetGcpVisionAnnotateOperationStatus extends Abstra
}
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ super.migrateProperties(config);
+ config.renameProperty("operationKey", OPERATION_KEY.getName());
+ }
+
abstract protected GeneratedMessageV3 deserializeResponse(ByteString
responseValue) throws InvalidProtocolBufferException;
}
diff --git
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/AbstractStartGcpVisionOperation.java
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/AbstractStartGcpVisionOperation.java
index a24beb4558..1ab680a16d 100644
---
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/AbstractStartGcpVisionOperation.java
+++
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/AbstractStartGcpVisionOperation.java
@@ -31,6 +31,7 @@ import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;
@@ -38,8 +39,7 @@ import org.apache.nifi.processor.util.StandardValidators;
public abstract class AbstractStartGcpVisionOperation<B extends
com.google.protobuf.GeneratedMessageV3.Builder<B>> extends
AbstractGcpVisionProcessor {
public static final PropertyDescriptor FEATURE_TYPE = new
PropertyDescriptor.Builder()
- .name("vision-feature-type")
- .displayName("Vision Feature Type")
+ .name("Vision Feature Type")
.description("Type of GCP Vision Feature. The value of this
property applies when the JSON Payload property is configured. " +
"The JSON Payload property value can use Expression
Language to reference the value of ${vision-feature-type}")
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
@@ -48,8 +48,7 @@ public abstract class AbstractStartGcpVisionOperation<B
extends com.google.proto
.defaultValue("TEXT_DETECTION")
.build();
public static final PropertyDescriptor OUTPUT_BUCKET = new
PropertyDescriptor.Builder()
- .name("output-bucket")
- .displayName("Output Bucket")
+ .name("Output Bucket")
.description("Name of the GCS bucket where the output of the
Vision job will be persisted. " +
"The value of this property applies when the JSON Payload
property is configured. " +
"The JSON Payload property value can use Expression
Language to reference the value of ${output-bucket}")
@@ -83,6 +82,13 @@ public abstract class AbstractStartGcpVisionOperation<B
extends com.google.proto
getVisionClient().close();
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ super.migrateProperties(config);
+ config.renameProperty("vision-feature-type", FEATURE_TYPE.getName());
+ config.renameProperty("output-bucket", OUTPUT_BUCKET.getName());
+ }
+
protected OperationFuture<?, ?> startOperation(ProcessSession session,
ProcessContext context, FlowFile flowFile) {
B builder = newBuilder();
InputStream inStream =
context.getProperty(getJsonPayloadPropertyDescriptor()).isSet()
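Note the chaining convention in the Vision hierarchy: each concrete operation calls
super.migrateProperties(config) before its own renames, so the shared credentials key
handled by AbstractGcpVisionProcessor is migrated first. A condensed view of how the
levels compose after this commit, using the "json-payload" rename added below:

    @Override
    public void migrateProperties(PropertyConfiguration config) {
        super.migrateProperties(config);       // parent-level renames run first
        config.renameProperty("json-payload", JSON_PAYLOAD.getName());
    }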
diff --git
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/StartGcpVisionAnnotateFilesOperation.java
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/StartGcpVisionAnnotateFilesOperation.java
index 41ba53a14b..ca38760575 100644
---
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/StartGcpVisionAnnotateFilesOperation.java
+++
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/StartGcpVisionAnnotateFilesOperation.java
@@ -30,6 +30,7 @@ import org.apache.nifi.annotation.documentation.SeeAlso;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.util.StandardValidators;
@Tags({"Google", "Cloud", "Machine Learning", "Vision"})
@@ -40,8 +41,7 @@ import org.apache.nifi.processor.util.StandardValidators;
})
public class StartGcpVisionAnnotateFilesOperation extends
AbstractStartGcpVisionOperation<AsyncBatchAnnotateFilesRequest.Builder> {
public static final PropertyDescriptor JSON_PAYLOAD = new
PropertyDescriptor.Builder()
- .name("json-payload")
- .displayName("JSON Payload")
+ .name("JSON Payload")
.description("JSON request for AWS Machine Learning services. The
Processor will use FlowFile content for the request when this property is not
specified.")
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.required(false)
@@ -81,6 +81,12 @@ public class StartGcpVisionAnnotateFilesOperation extends
AbstractStartGcpVision
return PROPERTY_DESCRIPTORS;
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ super.migrateProperties(config);
+ config.renameProperty("json-payload", JSON_PAYLOAD.getName());
+ }
+
@Override
AsyncBatchAnnotateFilesRequest.Builder newBuilder() {
return AsyncBatchAnnotateFilesRequest.newBuilder();
diff --git
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/StartGcpVisionAnnotateImagesOperation.java
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/StartGcpVisionAnnotateImagesOperation.java
index 18192a87ff..aeefee4c88 100644
---
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/StartGcpVisionAnnotateImagesOperation.java
+++
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/vision/StartGcpVisionAnnotateImagesOperation.java
@@ -30,6 +30,7 @@ import org.apache.nifi.annotation.documentation.SeeAlso;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.util.StandardValidators;
@Tags({"Google", "Cloud", "Machine Learning", "Vision"})
@@ -40,8 +41,7 @@ import org.apache.nifi.processor.util.StandardValidators;
})
public class StartGcpVisionAnnotateImagesOperation extends
AbstractStartGcpVisionOperation<AsyncBatchAnnotateImagesRequest.Builder> {
static final PropertyDescriptor JSON_PAYLOAD = new
PropertyDescriptor.Builder()
- .name("json-payload")
- .displayName("JSON Payload")
+ .name("JSON Payload")
.description("JSON request for AWS Machine Learning services. The
Processor will use FlowFile content for the request when this property is not
specified.")
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.required(false)
@@ -81,6 +81,12 @@ public class StartGcpVisionAnnotateImagesOperation extends
AbstractStartGcpVisio
return PROPERTY_DESCRIPTORS;
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ super.migrateProperties(config);
+ config.renameProperty("json-payload", JSON_PAYLOAD.getName());
+ }
+
@Override
AsyncBatchAnnotateImagesRequest.Builder newBuilder() {
return AsyncBatchAnnotateImagesRequest.newBuilder();
diff --git
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/test/java/org/apache/nifi/processors/gcp/bigquery/PutBigQueryIT.java
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/test/java/org/apache/nifi/processors/gcp/bigquery/PutBigQueryIT.java
index ba15106d21..c0c0f6354f 100644
---
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/test/java/org/apache/nifi/processors/gcp/bigquery/PutBigQueryIT.java
+++
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/test/java/org/apache/nifi/processors/gcp/bigquery/PutBigQueryIT.java
@@ -67,8 +67,12 @@ import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
+import static
org.apache.nifi.processors.gcp.bigquery.AbstractBigQueryProcessor.DATASET;
+import static
org.apache.nifi.processors.gcp.bigquery.AbstractBigQueryProcessor.TABLE_NAME;
+import static
org.apache.nifi.processors.gcp.bigquery.PutBigQuery.RECORD_READER;
import static org.apache.nifi.processors.gcp.bigquery.PutBigQuery.BATCH_TYPE;
import static org.apache.nifi.processors.gcp.bigquery.PutBigQuery.STREAM_TYPE;
+import static
org.apache.nifi.processors.gcp.bigquery.PutBigQuery.SKIP_INVALID_ROWS;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -144,7 +148,7 @@ public class PutBigQueryIT {
runner.run();
runner.assertAllFlowFilesTransferred(PutBigQuery.REL_SUCCESS, 1);
-
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(BigQueryAttributes.JOB_NB_RECORDS_ATTR,
"2");
+
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(PutBigQuery.JOB_NB_RECORDS_ATTR,
"2");
assertStreamingData(tableName);
@@ -160,7 +164,7 @@ public class PutBigQueryIT {
runner.run();
runner.assertAllFlowFilesTransferred(PutBigQuery.REL_FAILURE, 1);
-
runner.getFlowFilesForRelationship(PutBigQuery.REL_FAILURE).get(0).assertAttributeEquals(BigQueryAttributes.JOB_NB_RECORDS_ATTR,
"0");
+
runner.getFlowFilesForRelationship(PutBigQuery.REL_FAILURE).get(0).assertAttributeEquals(PutBigQuery.JOB_NB_RECORDS_ATTR,
"0");
TableResult result =
bigquery.listTableData(dataset.getDatasetId().getDataset(), tableName, schema);
assertFalse(result.getValues().iterator().hasNext());
@@ -173,13 +177,13 @@ public class PutBigQueryIT {
String tableName = prepareTable(STREAM_TYPE);
addRecordReader();
- runner.setProperty(BigQueryAttributes.SKIP_INVALID_ROWS_ATTR, "true");
+ runner.setProperty(SKIP_INVALID_ROWS, "true");
runner.enqueue(Paths.get("src/test/resources/bigquery/streaming-bad-data.json"));
runner.run();
runner.assertAllFlowFilesTransferred(PutBigQuery.REL_SUCCESS, 1);
-
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(BigQueryAttributes.JOB_NB_RECORDS_ATTR,
"1");
+
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(PutBigQuery.JOB_NB_RECORDS_ATTR,
"1");
TableResult result =
bigquery.listTableData(dataset.getDatasetId().getDataset(), tableName, schema);
Iterator<FieldValueList> iterator = result.getValues().iterator();
@@ -200,7 +204,7 @@ public class PutBigQueryIT {
runner.run();
runner.assertAllFlowFilesTransferred(PutBigQuery.REL_SUCCESS, 1);
-
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(BigQueryAttributes.JOB_NB_RECORDS_ATTR,
"2");
+
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(PutBigQuery.JOB_NB_RECORDS_ATTR,
"2");
assertStreamingData(tableName, true, false);
@@ -222,13 +226,13 @@ public class PutBigQueryIT {
runner.setProperty(jsonReader, DateTimeUtils.TIMESTAMP_FORMAT,
"MM-dd-yyyy HH:mm:ss z");
runner.enableControllerService(jsonReader);
- runner.setProperty(BigQueryAttributes.RECORD_READER_ATTR, "reader");
+ runner.setProperty(RECORD_READER, "reader");
runner.enqueue(Paths.get("src/test/resources/bigquery/streaming-correct-data-with-date-formatted.json"));
runner.run();
runner.assertAllFlowFilesTransferred(PutBigQuery.REL_SUCCESS, 1);
-
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(BigQueryAttributes.JOB_NB_RECORDS_ATTR,
"2");
+
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(PutBigQuery.JOB_NB_RECORDS_ATTR,
"2");
assertStreamingData(tableName, false, true);
@@ -245,7 +249,7 @@ public class PutBigQueryIT {
runner.run();
runner.assertAllFlowFilesTransferred(PutBigQuery.REL_SUCCESS, 1);
-
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(BigQueryAttributes.JOB_NB_RECORDS_ATTR,
"1");
+
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(PutBigQuery.JOB_NB_RECORDS_ATTR,
"1");
deleteTable(tableName);
}
@@ -288,7 +292,7 @@ public class PutBigQueryIT {
runner.run();
runner.assertAllFlowFilesTransferred(AbstractBigQueryProcessor.REL_SUCCESS, 1);
-
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(BigQueryAttributes.JOB_NB_RECORDS_ATTR,
Integer.toString(recordCount));
+
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(PutBigQuery.JOB_NB_RECORDS_ATTR,
Integer.toString(recordCount));
}
@Test
@@ -305,8 +309,8 @@ public class PutBigQueryIT {
// create table
bigquery.create(tableInfo);
- runner.setProperty(BigQueryAttributes.DATASET_ATTR,
dataset.getDatasetId().getDataset());
- runner.setProperty(BigQueryAttributes.TABLE_NAME_ATTR, tableName);
+ runner.setProperty(DATASET, dataset.getDatasetId().getDataset());
+ runner.setProperty(TABLE_NAME, tableName);
runner.setProperty(PutBigQuery.TRANSFER_TYPE, BATCH_TYPE);
AvroReader reader = new AvroReader();
@@ -317,7 +321,7 @@ public class PutBigQueryIT {
runner.setProperty(reader, SchemaAccessUtils.SCHEMA_TEXT,
recordSchema);
runner.enableControllerService(reader);
- runner.setProperty(BigQueryAttributes.RECORD_READER_ATTR, "reader");
+ runner.setProperty(RECORD_READER, "reader");
runner.enqueue(Paths.get("src/test/resources/bigquery/avrodecimal.avro"));
@@ -346,8 +350,8 @@ public class PutBigQueryIT {
// create table
bigquery.create(tableInfo);
- runner.setProperty(BigQueryAttributes.DATASET_ATTR,
dataset.getDatasetId().getDataset());
- runner.setProperty(BigQueryAttributes.TABLE_NAME_ATTR, tableName);
+ runner.setProperty(DATASET, dataset.getDatasetId().getDataset());
+ runner.setProperty(TABLE_NAME, tableName);
runner.setProperty(PutBigQuery.TRANSFER_TYPE, BATCH_TYPE);
AvroReader reader = new AvroReader();
@@ -358,7 +362,7 @@ public class PutBigQueryIT {
runner.setProperty(reader, SchemaAccessUtils.SCHEMA_TEXT,
recordSchema);
runner.enableControllerService(reader);
- runner.setProperty(BigQueryAttributes.RECORD_READER_ATTR, "reader");
+ runner.setProperty(RECORD_READER, "reader");
runner.enqueue(Paths.get("src/test/resources/bigquery/avrofloat.avro"));
@@ -387,8 +391,8 @@ public class PutBigQueryIT {
// create table
bigquery.create(tableInfo);
- runner.setProperty(BigQueryAttributes.DATASET_ATTR,
dataset.getDatasetId().getDataset());
- runner.setProperty(BigQueryAttributes.TABLE_NAME_ATTR, tableName);
+ runner.setProperty(DATASET, dataset.getDatasetId().getDataset());
+ runner.setProperty(TABLE_NAME, tableName);
runner.setProperty(PutBigQuery.TRANSFER_TYPE, BATCH_TYPE);
AvroReader reader = new AvroReader();
@@ -399,7 +403,7 @@ public class PutBigQueryIT {
runner.setProperty(reader, SchemaAccessUtils.SCHEMA_TEXT,
recordSchema);
runner.enableControllerService(reader);
- runner.setProperty(BigQueryAttributes.RECORD_READER_ATTR, "reader");
+ runner.setProperty(RECORD_READER, "reader");
runner.enqueue(Paths.get("src/test/resources/bigquery/avroint.avro"));
@@ -423,8 +427,8 @@ public class PutBigQueryIT {
createTableForBatch(tableName);
}
- runner.setProperty(BigQueryAttributes.DATASET_ATTR,
dataset.getDatasetId().getDataset());
- runner.setProperty(BigQueryAttributes.TABLE_NAME_ATTR, tableName);
+ runner.setProperty(DATASET, dataset.getDatasetId().getDataset());
+ runner.setProperty(TABLE_NAME, tableName);
runner.setProperty(PutBigQuery.TRANSFER_TYPE, transferType);
return tableName;
@@ -435,13 +439,13 @@ public class PutBigQueryIT {
runner.addControllerService("reader", jsonReader);
runner.enableControllerService(jsonReader);
- runner.setProperty(BigQueryAttributes.RECORD_READER_ATTR, "reader");
+ runner.setProperty(RECORD_READER, "reader");
}
private void addRecordReaderWithSchema(String schema) throws
InitializationException, IOException {
JsonTreeReader jsonReader = new JsonTreeReader();
runner.addControllerService("reader", jsonReader);
- runner.setProperty(BigQueryAttributes.RECORD_READER_ATTR, "reader");
+ runner.setProperty(RECORD_READER, "reader");
String recordSchema = new
String(Files.readAllBytes(Paths.get(schema)));
runner.setProperty(jsonReader,
SchemaAccessUtils.SCHEMA_ACCESS_STRATEGY,
SchemaAccessUtils.SCHEMA_TEXT_PROPERTY);
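On the test side, property setup and assertions now go through the PropertyDescriptor
constants rather than the attribute-name strings from the deleted BigQueryAttributes
class, so any future rename is caught at compile time. A minimal sketch of the updated
idiom; the dataset and table values are placeholders, and credentials and record
reader setup are elided:

    final TestRunner runner = TestRunners.newTestRunner(PutBigQuery.class);
    runner.setProperty(AbstractBigQueryProcessor.DATASET, "my_dataset");  // was BigQueryAttributes.DATASET_ATTR
    runner.setProperty(AbstractBigQueryProcessor.TABLE_NAME, "my_table"); // was BigQueryAttributes.TABLE_NAME_ATTR
    runner.setProperty(PutBigQuery.RECORD_READER, "reader");              // was BigQueryAttributes.RECORD_READER_ATTR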
diff --git
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/test/java/org/apache/nifi/processors/gcp/bigquery/PutBigQueryTest.java
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/test/java/org/apache/nifi/processors/gcp/bigquery/PutBigQueryTest.java
index 6e6c513801..3240b555a0 100644
---
a/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/test/java/org/apache/nifi/processors/gcp/bigquery/PutBigQueryTest.java
+++
b/nifi-extension-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/test/java/org/apache/nifi/processors/gcp/bigquery/PutBigQueryTest.java
@@ -272,8 +272,8 @@ public class PutBigQueryTest {
runner.assertAllFlowFilesTransferred(PutBigQuery.REL_SUCCESS,
iteration);
-
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(BigQueryAttributes.JOB_NB_RECORDS_ATTR,
Integer.toString(entityCountFirst));
-
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(1).assertAttributeEquals(BigQueryAttributes.JOB_NB_RECORDS_ATTR,
Integer.toString(entityCountSecond));
+
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(0).assertAttributeEquals(PutBigQuery.JOB_NB_RECORDS_ATTR,
Integer.toString(entityCountFirst));
+
runner.getFlowFilesForRelationship(PutBigQuery.REL_SUCCESS).get(1).assertAttributeEquals(PutBigQuery.JOB_NB_RECORDS_ATTR,
Integer.toString(entityCountSecond));
}
@Test
diff --git
a/nifi-extension-bundles/nifi-geohash-bundle/nifi-geohash-processors/src/main/java/org/apache/nifi/processors/geohash/GeohashRecord.java
b/nifi-extension-bundles/nifi-geohash-bundle/nifi-geohash-processors/src/main/java/org/apache/nifi/processors/geohash/GeohashRecord.java
index 0ea1d444f6..d22d7f5e35 100644
---
a/nifi-extension-bundles/nifi-geohash-bundle/nifi-geohash-processors/src/main/java/org/apache/nifi/processors/geohash/GeohashRecord.java
+++
b/nifi-extension-bundles/nifi-geohash-bundle/nifi-geohash-processors/src/main/java/org/apache/nifi/processors/geohash/GeohashRecord.java
@@ -29,6 +29,7 @@ import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
@@ -89,8 +90,7 @@ public class GeohashRecord extends AbstractProcessor {
}
public static final PropertyDescriptor MODE = new
PropertyDescriptor.Builder()
- .name("mode")
- .displayName("Mode")
+ .name("Mode")
.description("Specifies whether to encode latitude/longitude to
geohash or decode geohash to latitude/longitude")
.required(true)
.allowableValues(ProcessingMode.values())
@@ -98,8 +98,7 @@ public class GeohashRecord extends AbstractProcessor {
.build();
public static final PropertyDescriptor ROUTING_STRATEGY = new
PropertyDescriptor.Builder()
- .name("routing-strategy")
- .displayName("Routing Strategy")
+ .name("Routing Strategy")
.description("Specifies how to route flowfiles after encoding or
decoding being performed. "
+ "SKIP will enrich those records that can be enriched and
skip the rest. "
+ "The SKIP strategy will route a flowfile to failure only
if unable to parse the data. "
@@ -114,24 +113,21 @@ public class GeohashRecord extends AbstractProcessor {
.build();
public static final PropertyDescriptor RECORD_READER = new
PropertyDescriptor.Builder()
- .name("record-reader")
- .displayName("Record Reader")
+ .name("Record Reader")
.description("Specifies the record reader service to use for
reading incoming data")
.required(true)
.identifiesControllerService(RecordReaderFactory.class)
.build();
public static final PropertyDescriptor RECORD_WRITER = new
PropertyDescriptor.Builder()
- .name("record-writer")
- .displayName("Record Writer")
+ .name("Record Writer")
.description("Specifies the record writer service to use for
writing data")
.required(true)
.identifiesControllerService(RecordSetWriterFactory.class)
.build();
public static final PropertyDescriptor LATITUDE_RECORD_PATH = new
PropertyDescriptor.Builder()
- .name("latitude-record-path")
- .displayName("Latitude Record Path")
+ .name("Latitude Record Path")
.description("In the ENCODE mode, this property specifies the
record path to retrieve the latitude values. "
+ "Latitude values should be in the range of [-90, 90];
invalid values will be logged at warn level. "
+ "In the DECODE mode, this property specifies the record
path to put the latitude value")
@@ -141,8 +137,7 @@ public class GeohashRecord extends AbstractProcessor {
.build();
public static final PropertyDescriptor LONGITUDE_RECORD_PATH = new
PropertyDescriptor.Builder()
- .name("longitude-record-path")
- .displayName("Longitude Record Path")
+ .name("Longitude Record Path")
.description("In the ENCODE mode, this property specifies the
record path to retrieve the longitude values; "
+ "Longitude values should be in the range of [-180, 180];
invalid values will be logged at warn level. "
+ "In the DECODE mode, this property specifies the record
path to put the longitude value")
@@ -152,8 +147,7 @@ public class GeohashRecord extends AbstractProcessor {
.build();
public static final PropertyDescriptor GEOHASH_RECORD_PATH = new
PropertyDescriptor.Builder()
- .name("geohash-record-path")
- .displayName("Geohash Record Path")
+ .name("Geohash Record Path")
.description("In the ENCODE mode, this property specifies the
record path to put the geohash value; "
+ "in the DECODE mode, this property specifies the record
path to retrieve the geohash value")
.required(true)
@@ -162,8 +156,7 @@ public class GeohashRecord extends AbstractProcessor {
.build();
public static final PropertyDescriptor GEOHASH_FORMAT = new
PropertyDescriptor.Builder()
- .name("geohash-format")
- .displayName("Geohash Format")
+ .name("Geohash Format")
.description("In the ENCODE mode, this property specifies the
desired format for encoding geohash; "
+ "in the DECODE mode, this property specifies the format
of geohash provided")
.required(true)
@@ -172,8 +165,7 @@ public class GeohashRecord extends AbstractProcessor {
.build();
public static final PropertyDescriptor GEOHASH_LEVEL = new
PropertyDescriptor.Builder()
- .name("geohash-level")
- .displayName("Geohash Level")
+ .name("Geohash Level")
.description("The integer precision level(1-12) desired for
encoding geohash")
.required(true)
.addValidator(StandardValidators.createLongValidator(1, 12, true))
@@ -376,6 +368,19 @@ public class GeohashRecord extends AbstractProcessor {
routingStrategyExecutor.transferFlowFiles(session, input, output,
notMatched);
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ config.renameProperty("mode", MODE.getName());
+ config.renameProperty("routing-strategy", ROUTING_STRATEGY.getName());
+ config.renameProperty("record-reader", RECORD_READER.getName());
+ config.renameProperty("record-writer", RECORD_WRITER.getName());
+ config.renameProperty("latitude-record-path",
LATITUDE_RECORD_PATH.getName());
+ config.renameProperty("longitude-record-path",
LONGITUDE_RECORD_PATH.getName());
+ config.renameProperty("geohash-record-path",
GEOHASH_RECORD_PATH.getName());
+ config.renameProperty("geohash-format", GEOHASH_FORMAT.getName());
+ config.renameProperty("geohash-level", GEOHASH_LEVEL.getName());
+ }
+
private interface RoutingStrategyExecutor {
void writeFlowFiles(Record record, RecordSetWriter writer,
RecordSetWriter notMatchedWriter, boolean updated) throws IOException;
diff --git
a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/AbstractGraphExecutor.java
b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/AbstractGraphExecutor.java
index b8bc7dedf9..47c3e8626d 100644
---
a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/AbstractGraphExecutor.java
+++
b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/AbstractGraphExecutor.java
@@ -36,15 +36,13 @@ import java.util.stream.Collectors;
*/
abstract class AbstractGraphExecutor extends AbstractProcessor {
public static final PropertyDescriptor CLIENT_SERVICE = new
PropertyDescriptor.Builder()
- .name("graph-client-service")
- .displayName("Client Service")
+ .name("Client Service")
.description("The graph client service for connecting to the graph
database.")
.required(true)
.identifiesControllerService(GraphClientService.class)
.build();
public static final PropertyDescriptor QUERY = new
PropertyDescriptor.Builder()
- .name("graph-query")
- .displayName("Graph Query")
+ .name("Graph Query")
.description("Specifies the graph query. If it is left blank, the
processor will attempt " +
"to get the query from body.")
.required(false)
diff --git
a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQuery.java
b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQuery.java
index 7b5d9bf816..b04d5042d7 100644
---
a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQuery.java
+++
b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQuery.java
@@ -28,6 +28,7 @@ import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.graph.GraphClientService;
import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
@@ -144,6 +145,12 @@ public class ExecuteGraphQuery extends
AbstractGraphExecutor {
}
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ config.renameProperty("graph-client-service",
CLIENT_SERVICE.getName());
+ config.renameProperty("graph-query", QUERY.getName());
+ }
+
protected String getQuery(ProcessContext context, ProcessSession session,
FlowFile input) {
String query =
context.getProperty(QUERY).evaluateAttributeExpressions(input).getValue();
if (StringUtils.isEmpty(query) && input != null) {
diff --git
a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQueryRecord.java
b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQueryRecord.java
index 278fd1b876..bbaf4c75f0 100644
---
a/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQueryRecord.java
+++
b/nifi-extension-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQueryRecord.java
@@ -30,6 +30,7 @@ import org.apache.nifi.components.Validator;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.graph.GraphClientService;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
@@ -72,8 +73,7 @@ import java.util.stream.Collectors;
public class ExecuteGraphQueryRecord extends AbstractGraphExecutor {
public static final PropertyDescriptor CLIENT_SERVICE = new
PropertyDescriptor.Builder()
- .name("client-service")
- .displayName("Client Service")
+ .name("Client Service")
.description("The graph client service for connecting to a graph
database.")
.identifiesControllerService(GraphClientService.class)
.addValidator(Validator.VALID)
@@ -81,8 +81,7 @@ public class ExecuteGraphQueryRecord extends
AbstractGraphExecutor {
.build();
public static final PropertyDescriptor READER_SERVICE = new
PropertyDescriptor.Builder()
- .name("reader-service")
- .displayName("Record Reader")
+ .name("Record Reader")
.description("The record reader to use with this processor.")
.identifiesControllerService(RecordReaderFactory.class)
.required(true)
@@ -90,8 +89,7 @@ public class ExecuteGraphQueryRecord extends
AbstractGraphExecutor {
.build();
public static final PropertyDescriptor WRITER_SERVICE = new
PropertyDescriptor.Builder()
- .name("writer-service")
- .displayName("Failed Record Writer")
+ .name("Failed Record Writer")
.description("The record writer to use for writing failed
records.")
.identifiesControllerService(RecordSetWriterFactory.class)
.required(true)
@@ -99,8 +97,7 @@ public class ExecuteGraphQueryRecord extends
AbstractGraphExecutor {
.build();
public static final PropertyDescriptor SUBMISSION_SCRIPT = new
PropertyDescriptor.Builder()
- .name("record-script")
- .displayName("Graph Record Script")
+ .name("Graph Record Script")
.description("Script to perform the business logic on graph, using
flow file attributes and custom properties " +
"as variable-value pairs in its logic.")
.required(true)
@@ -171,6 +168,14 @@ public class ExecuteGraphQueryRecord extends
AbstractGraphExecutor {
recordPathCache = new RecordPathCache(100);
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ config.renameProperty("client-service", CLIENT_SERVICE.getName());
+ config.renameProperty("reader-service", READER_SERVICE.getName());
+ config.renameProperty("writer-service", WRITER_SERVICE.getName());
+ config.renameProperty("record-script", SUBMISSION_SCRIPT.getName());
+ }
+
private Object getRecordValue(Record record, RecordPath recordPath) {
final RecordPathResult result = recordPath.evaluate(record);
final List<FieldValue> values =
result.getSelectedFields().collect(Collectors.toList());
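One wrinkle worth noting: ExecuteGraphQueryRecord historically used "client-service"
while AbstractGraphExecutor (and therefore ExecuteGraphQuery) used
"graph-client-service", so the two processors migrate different old keys to the same
canonical "Client Service" name:

    // ExecuteGraphQuery, via the descriptor inherited from AbstractGraphExecutor
    config.renameProperty("graph-client-service", CLIENT_SERVICE.getName());
    // ExecuteGraphQueryRecord, which redefines the descriptor locally
    config.renameProperty("client-service", CLIENT_SERVICE.getName());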
diff --git
a/nifi-extension-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/main/java/org/apache/nifi/graph/Neo4JCypherClientService.java
b/nifi-extension-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/main/java/org/apache/nifi/graph/Neo4JCypherClientService.java
index 4a89d8dbae..527073632e 100644
---
a/nifi-extension-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/main/java/org/apache/nifi/graph/Neo4JCypherClientService.java
+++
b/nifi-extension-bundles/nifi-graph-bundle/nifi-neo4j-cypher-service/src/main/java/org/apache/nifi/graph/Neo4JCypherClientService.java
@@ -27,6 +27,7 @@ import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.neo4j.driver.AuthTokens;
@@ -59,8 +60,7 @@ import static
org.neo4j.driver.Config.TrustStrategy.trustCustomCertificateSigned
"Neo4J should break driver compatibility between 4.X and a future
release.")
public class Neo4JCypherClientService extends AbstractControllerService
implements GraphClientService {
public static final PropertyDescriptor CONNECTION_URL = new
PropertyDescriptor.Builder()
- .name("neo4j-connection-url")
- .displayName("Neo4j Connection URL")
+ .name("Neo4j Connection URL")
.description("Neo4J endpoing to connect to.")
.required(true)
.defaultValue("bolt://localhost:7687")
@@ -69,8 +69,7 @@ public class Neo4JCypherClientService extends
AbstractControllerService implemen
.build();
public static final PropertyDescriptor USERNAME = new
PropertyDescriptor.Builder()
- .name("neo4j-username")
- .displayName("Username")
+ .name("Username")
.description("Username for accessing Neo4J")
.required(true)
.expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
@@ -78,8 +77,7 @@ public class Neo4JCypherClientService extends
AbstractControllerService implemen
.build();
public static final PropertyDescriptor PASSWORD = new
PropertyDescriptor.Builder()
- .name("neo4j-password")
- .displayName("Password")
+ .name("Password")
.description("Password for Neo4J user. A dummy non-blank password
is required even if it is disabled on the server.")
.required(true)
.sensitive(true)
@@ -87,8 +85,7 @@ public class Neo4JCypherClientService extends
AbstractControllerService implemen
.build();
public static final PropertyDescriptor CONNECTION_TIMEOUT = new
PropertyDescriptor.Builder()
- .name("neo4j-max-connection-time-out")
- .displayName("Neo4J Max Connection Time Out (seconds)")
+ .name("Connection Timeout")
.description("The maximum time for establishing connection to the
Neo4j")
.defaultValue("5 seconds")
.required(true)
@@ -98,8 +95,7 @@ public class Neo4JCypherClientService extends
AbstractControllerService implemen
.build();
public static final PropertyDescriptor MAX_CONNECTION_POOL_SIZE = new
PropertyDescriptor.Builder()
- .name("neo4j-max-connection-pool-size")
- .displayName("Neo4J Max Connection Pool Size")
+ .name("Connection Pool Size")
.description("The maximum connection pool size for Neo4j.")
.defaultValue("100")
.required(true)
@@ -109,8 +105,7 @@ public class Neo4JCypherClientService extends
AbstractControllerService implemen
.build();
public static final PropertyDescriptor MAX_CONNECTION_ACQUISITION_TIMEOUT
= new PropertyDescriptor.Builder()
- .name("neo4j-max-connection-acquisition-timeout")
- .displayName("Neo4J Max Connection Acquisition Timeout")
+ .name("Connection Acquisition Timeout")
.description("The maximum connection acquisition timeout.")
.defaultValue("60 second")
.required(true)
@@ -120,8 +115,7 @@ public class Neo4JCypherClientService extends
AbstractControllerService implemen
.build();
public static final PropertyDescriptor IDLE_TIME_BEFORE_CONNECTION_TEST =
new PropertyDescriptor.Builder()
- .name("neo4j-idle-time-before-test")
- .displayName("Neo4J Idle Time Before Connection Test")
+ .name("Idle Time Before Connection Test")
.description("The idle time before connection test.")
.defaultValue("60 seconds")
.required(true)
@@ -131,8 +125,7 @@ public class Neo4JCypherClientService extends
AbstractControllerService implemen
.build();
public static final PropertyDescriptor MAX_CONNECTION_LIFETIME = new
PropertyDescriptor.Builder()
- .name("neo4j-max-connection-lifetime")
- .displayName("Neo4J Max Connection Lifetime")
+ .name("Connection Lifetime")
.description("The maximum connection lifetime")
.defaultValue("3600 seconds")
.required(true)
@@ -189,6 +182,18 @@ public class Neo4JCypherClientService extends
AbstractControllerService implemen
return PROPERTY_DESCRIPTORS;
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ config.renameProperty("neo4j-connection-url",
CONNECTION_URL.getName());
+ config.renameProperty("neo4j-username", USERNAME.getName());
+ config.renameProperty("neo4j-password", PASSWORD.getName());
+ config.renameProperty("neo4j-max-connection-time-out",
CONNECTION_TIMEOUT.getName());
+ config.renameProperty("neo4j-max-connection-pool-size",
MAX_CONNECTION_POOL_SIZE.getName());
+ config.renameProperty("neo4j-max-connection-acquisition-timeout",
MAX_CONNECTION_ACQUISITION_TIMEOUT.getName());
+ config.renameProperty("neo4j-idle-time-before-test",
IDLE_TIME_BEFORE_CONNECTION_TEST.getName());
+ config.renameProperty("neo4j-max-connection-lifetime",
MAX_CONNECTION_LIFETIME.getName());
+ }
+
protected Driver getDriver(ConfigurationContext context) {
connectionUrl =
context.getProperty(CONNECTION_URL).evaluateAttributeExpressions().getValue();
username =
context.getProperty(USERNAME).evaluateAttributeExpressions().getValue();
diff --git
a/nifi-extension-bundles/nifi-groovyx-bundle/nifi-groovyx-processors/src/main/java/org/apache/nifi/processors/groovyx/ExecuteGroovyScript.java
b/nifi-extension-bundles/nifi-groovyx-bundle/nifi-groovyx-processors/src/main/java/org/apache/nifi/processors/groovyx/ExecuteGroovyScript.java
index 6e35b44a21..5859b6a411 100644
---
a/nifi-extension-bundles/nifi-groovyx-bundle/nifi-groovyx-processors/src/main/java/org/apache/nifi/processors/groovyx/ExecuteGroovyScript.java
+++
b/nifi-extension-bundles/nifi-groovyx-bundle/nifi-groovyx-processors/src/main/java/org/apache/nifi/processors/groovyx/ExecuteGroovyScript.java
@@ -50,6 +50,7 @@ import org.apache.nifi.components.state.Scope;
import org.apache.nifi.controller.ControllerService;
import org.apache.nifi.dbcp.DBCPService;
import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.migration.PropertyConfiguration;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
@@ -96,8 +97,7 @@ public class ExecuteGroovyScript extends AbstractProcessor {
+ "import org.apache.nifi.processor.util.*;" + "import
org.apache.nifi.processors.script.*;" + "import
org.apache.nifi.logging.ComponentLog;";
public static final PropertyDescriptor SCRIPT_FILE = new
PropertyDescriptor.Builder()
- .name("groovyx-script-file")
- .displayName("Script File")
+ .name("Script File")
.required(false)
.description("Path to script file to execute. Only one of Script
File or Script Body may be used")
.identifiesExternalResource(ResourceCardinality.SINGLE,
ResourceType.FILE)
@@ -105,8 +105,7 @@ public class ExecuteGroovyScript extends AbstractProcessor {
.build();
public static final PropertyDescriptor SCRIPT_BODY = new
PropertyDescriptor.Builder()
- .name("groovyx-script-body")
- .displayName("Script Body")
+ .name("Script Body")
.required(false)
.description("Body of script to execute. Only one of Script File
or Script Body may be used")
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
@@ -115,8 +114,7 @@ public class ExecuteGroovyScript extends AbstractProcessor {
public static String[] VALID_FAIL_STRATEGY = {"rollback", "transfer to
failure"};
public static final PropertyDescriptor FAIL_STRATEGY = new
PropertyDescriptor.Builder()
- .name("groovyx-failure-strategy")
- .displayName("Failure strategy")
+ .name("Failure Strategy")
.description("What to do with unhandled exceptions. If you want to
manage exceptions in code then keep the default value `rollback`."
+ " If `transfer to failure` selected and unhandled
exception occurred then all flowFiles received from incoming queues in this
session"
+ " will be transferred to `failure` relationship with
additional attributes set: ERROR_MESSAGE and ERROR_STACKTRACE."
@@ -129,8 +127,7 @@ public class ExecuteGroovyScript extends AbstractProcessor {
.build();
public static final PropertyDescriptor ADD_CLASSPATH = new
PropertyDescriptor.Builder()
- .name("groovyx-additional-classpath")
- .displayName("Additional classpath")
+ .name("Additional Classpath")
.required(false)
.description("Classpath list separated by semicolon or comma. You
can use masks like `*`, `*.jar` in file names.")
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
@@ -342,6 +339,14 @@ public class ExecuteGroovyScript extends AbstractProcessor
{
return script;
}
+ @Override
+ public void migrateProperties(PropertyConfiguration config) {
+ config.renameProperty("groovyx-script-file", SCRIPT_FILE.getName());
+ config.renameProperty("groovyx-script-body", SCRIPT_BODY.getName());
+ config.renameProperty("groovyx-failure-strategy",
FAIL_STRATEGY.getName());
+ config.renameProperty("groovyx-additional-classpath",
ADD_CLASSPATH.getName());
+ }
+
/**
* init SQL variables from DBCP services
*/