exceptionfactory commented on code in PR #6416:
URL: https://github.com/apache/nifi/pull/6416#discussion_r1007536186


##########
nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/main/java/org/apache/nifi/processors/PutIoTDBRecord.java:
##########
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors;
+
+import java.io.InputStream;
+import java.sql.Timestamp;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Set;
+import java.util.List;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.iotdb.rpc.IoTDBConnectionException;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processors.model.IoTDBSchema;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.write.record.Tablet;
+import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.model.ValidationResult;
+import org.apache.nifi.serialization.RecordReader;
+import org.apache.nifi.serialization.RecordReaderFactory;
+import org.apache.nifi.serialization.record.Record;
+
+@Tags({"iotdb", "insert", "tablet"})
+@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
+@CapabilityDescription(
+        "This is a record aware processor that reads the content of the 
incoming FlowFile as individual records using the "
+                + "configured 'Record Reader' and writes them to Apache IoTDB 
using native interface.")
+public class PutIoTDBRecord extends AbstractIoTDB {
+
+    static final PropertyDescriptor RECORD_READER_FACTORY =
+            new PropertyDescriptor.Builder()
+                    .name("Record Reader")
+                    .description(
+                            "Specifies the type of Record Reader controller 
service to use for parsing the incoming data "
+                                    + "and determining the schema")
+                    .identifiesControllerService(RecordReaderFactory.class)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor SCHEMA =
+            new PropertyDescriptor.Builder()
+                    .name("Schema Template")
+                    .description(
+                            "The Apache IoTDB Schema Template defined using 
JSON.\n" +
+                                    "The Processor will infer the IoTDB Schema 
when this property is not configured.\n" +
+                                    "Besides, you can set encoding type and 
compression type by this method.\n" +
+                                    "If you want to know more detail about 
this, you can browse this link: 
https://iotdb.apache.org/UserGuide/Master/Ecosystem-Integration/NiFi-IoTDB.html")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor PREFIX =
+            new PropertyDescriptor.Builder()
+                    .name("Prefix")
+                    .description(
+                            "The Prefix begin with root. that will be add to 
the tsName in data.\n")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor ALIGNED =
+            new PropertyDescriptor.Builder()
+                    .name("Aligned")
+                    .description("Whether to use the Apache IoTDB Aligned 
Timeseries interface")
+                    .allowableValues("true", "false")
+                    .defaultValue("false")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor MAX_ROW_NUMBER =
+            new PropertyDescriptor.Builder()
+                    .name("Max Row Number")
+                    .description(
+                            "Specifies the max row number of each Apache IoTDB 
Tablet")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    private static final String ROOTPREFIX = "root.";
+
+    @Override
+    protected void init(final ProcessorInitializationContext context) {
+        super.init(context);
+        descriptors.add(RECORD_READER_FACTORY);
+        descriptors.add(SCHEMA);
+        descriptors.add(PREFIX);
+        descriptors.add(ALIGNED);
+        descriptors.add(MAX_ROW_NUMBER);
+    }
+
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return descriptors;
+    }
+
+    @Override
+    public Set<Relationship> getRelationships() {
+        return relationships;
+    }
+
+    @OnScheduled
+    public void onScheduled(final ProcessContext context) throws 
IoTDBConnectionException {
+        super.onScheduled(context);
+    }
+
+    @Override
+    public void onTrigger(ProcessContext processContext, ProcessSession 
processSession)
+            throws ProcessException {
+        final RecordReaderFactory recordParserFactory =
+                processContext
+                        .getProperty(RECORD_READER_FACTORY)
+                        .asControllerService(RecordReaderFactory.class);
+
+        FlowFile flowFile = processSession.get();
+
+        if (flowFile == null) {
+            return;
+        }
+
+        String schemaProperty = 
processContext.getProperty(SCHEMA).evaluateAttributeExpressions(flowFile).getValue();
+        String alignedProperty = 
processContext.getProperty(ALIGNED).evaluateAttributeExpressions(flowFile).getValue();
+        String maxRowNumberProperty = 
processContext.getProperty(MAX_ROW_NUMBER).evaluateAttributeExpressions(flowFile).getValue();
+        String prefix = 
processContext.getProperty(PREFIX).evaluateAttributeExpressions(flowFile).getValue();
+
+        if (!prefix.startsWith(ROOTPREFIX) || !prefix.endsWith(".")) {
+            getLogger().error("The prefix is not begin with root and end with 
.", flowFile);
+            processSession.transfer(flowFile, REL_FAILURE);
+        }
+
+        final boolean aligned = alignedProperty != null ? 
Boolean.valueOf(alignedProperty) : false;
+        int maxRowNumber = maxRowNumberProperty != null ? 
Integer.valueOf(maxRowNumberProperty) : 1024;

Review Comment:
   The `1024` value should be set as the default value of the `MAX_ROW_NUMBER` 
property descriptor.



##########
nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/main/java/org/apache/nifi/processors/PutIoTDBRecord.java:
##########
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors;
+
+import java.io.InputStream;
+import java.sql.Timestamp;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Set;
+import java.util.List;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.iotdb.rpc.IoTDBConnectionException;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processors.model.IoTDBSchema;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.write.record.Tablet;
+import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.model.ValidationResult;
+import org.apache.nifi.serialization.RecordReader;
+import org.apache.nifi.serialization.RecordReaderFactory;
+import org.apache.nifi.serialization.record.Record;
+
+@Tags({"iotdb", "insert", "tablet"})
+@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
+@CapabilityDescription(
+        "This is a record aware processor that reads the content of the 
incoming FlowFile as individual records using the "
+                + "configured 'Record Reader' and writes them to Apache IoTDB 
using native interface.")
+public class PutIoTDBRecord extends AbstractIoTDB {
+
+    static final PropertyDescriptor RECORD_READER_FACTORY =
+            new PropertyDescriptor.Builder()
+                    .name("Record Reader")
+                    .description(
+                            "Specifies the type of Record Reader controller 
service to use for parsing the incoming data "
+                                    + "and determining the schema")
+                    .identifiesControllerService(RecordReaderFactory.class)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor SCHEMA =
+            new PropertyDescriptor.Builder()
+                    .name("Schema Template")
+                    .description(
+                            "The Apache IoTDB Schema Template defined using 
JSON.\n" +
+                                    "The Processor will infer the IoTDB Schema 
when this property is not configured.\n" +
+                                    "Besides, you can set encoding type and 
compression type by this method.\n" +
+                                    "If you want to know more detail about 
this, you can browse this link: 
https://iotdb.apache.org/UserGuide/Master/Ecosystem-Integration/NiFi-IoTDB.html")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor PREFIX =
+            new PropertyDescriptor.Builder()
+                    .name("Prefix")
+                    .description(
+                            "The Prefix begin with root. that will be add to 
the tsName in data.\n")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor ALIGNED =
+            new PropertyDescriptor.Builder()
+                    .name("Aligned")
+                    .description("Whether to use the Apache IoTDB Aligned 
Timeseries interface")
+                    .allowableValues("true", "false")
+                    .defaultValue("false")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor MAX_ROW_NUMBER =
+            new PropertyDescriptor.Builder()
+                    .name("Max Row Number")
+                    .description(
+                            "Specifies the max row number of each Apache IoTDB 
Tablet")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    private static final String ROOTPREFIX = "root.";
+
+    @Override
+    protected void init(final ProcessorInitializationContext context) {
+        super.init(context);
+        descriptors.add(RECORD_READER_FACTORY);
+        descriptors.add(SCHEMA);
+        descriptors.add(PREFIX);
+        descriptors.add(ALIGNED);
+        descriptors.add(MAX_ROW_NUMBER);
+    }
+
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return descriptors;
+    }
+
+    @Override
+    public Set<Relationship> getRelationships() {
+        return relationships;
+    }
+
+    @OnScheduled
+    public void onScheduled(final ProcessContext context) throws 
IoTDBConnectionException {
+        super.onScheduled(context);
+    }
+
+    @Override
+    public void onTrigger(ProcessContext processContext, ProcessSession 
processSession)
+            throws ProcessException {
+        final RecordReaderFactory recordParserFactory =
+                processContext
+                        .getProperty(RECORD_READER_FACTORY)
+                        .asControllerService(RecordReaderFactory.class);
+
+        FlowFile flowFile = processSession.get();
+
+        if (flowFile == null) {
+            return;
+        }
+
+        String schemaProperty = 
processContext.getProperty(SCHEMA).evaluateAttributeExpressions(flowFile).getValue();
+        String alignedProperty = 
processContext.getProperty(ALIGNED).evaluateAttributeExpressions(flowFile).getValue();
+        String maxRowNumberProperty = 
processContext.getProperty(MAX_ROW_NUMBER).evaluateAttributeExpressions(flowFile).getValue();
+        String prefix = 
processContext.getProperty(PREFIX).evaluateAttributeExpressions(flowFile).getValue();
+
+        if (!prefix.startsWith(ROOTPREFIX) || !prefix.endsWith(".")) {
+            getLogger().error("The prefix is not begin with root and end with 
.", flowFile);
+            processSession.transfer(flowFile, REL_FAILURE);
+        }
+
+        final boolean aligned = alignedProperty != null ? 
Boolean.valueOf(alignedProperty) : false;
+        int maxRowNumber = maxRowNumberProperty != null ? 
Integer.valueOf(maxRowNumberProperty) : 1024;
+
+        try (final InputStream inputStream = processSession.read(flowFile);
+             final RecordReader recordReader =
+                     recordParserFactory.createRecordReader(flowFile, 
inputStream, getLogger())) {
+            boolean needInitFormatter;
+            IoTDBSchema schema;
+            ValidationResult result;
+
+            result =
+                    schemaProperty != null
+                            ? validateSchemaAttribute(schemaProperty)
+                            : validateSchema(recordReader.getSchema());
+
+            if (!result.getKey()) {
+                getLogger().error("The property `schema` has an error: {}", 
result.getValue());
+                inputStream.close();
+                recordReader.close();
+                processSession.transfer(flowFile, REL_FAILURE);
+                return;
+            } else {
+                if (result.getValue() != null) {
+                    getLogger().warn("The property `schema` has a warn: {}", 
result.getValue());
+                }
+            }
+
+            schema =
+                    schemaProperty != null
+                            ? mapper.readValue(schemaProperty, 
IoTDBSchema.class)
+                            : convertSchema(recordReader.getSchema());
+
+            List<String> fieldNames = schema.getFieldNames(prefix);
+
+            needInitFormatter = schema.getTimeType() != 
IoTDBSchema.TimeType.LONG;
+
+            HashMap<String, Tablet> tablets = generateTablets(schema, prefix, 
maxRowNumber);
+            DateTimeFormatter format = null;
+
+            Record record;
+
+            while ((record = recordReader.nextRecord()) != null) {
+                Object[] values = record.getValues();
+                if (format == null && needInitFormatter) {
+                    format = initFormatter((String) values[0]);
+                    if (format == null) {
+                        getLogger().error("{} Record [{}] time format not 
supported", flowFile, values[0]);
+                        inputStream.close();
+                        recordReader.close();
+                        processSession.transfer(flowFile, REL_FAILURE);
+                        return;
+                    }
+                }
+
+                long timestamp;
+                if (needInitFormatter) {
+                    timestamp = Timestamp
+                            .valueOf(LocalDateTime.parse((String) values[0], 
format))
+                            .getTime();
+                } else {
+                    timestamp = (Long) values[0];
+                }
+
+                boolean isFulled = false;
+
+                for (Map.Entry<String, Tablet> entry : tablets.entrySet()) {
+                    String device = entry.getKey();
+                    Tablet tablet = entry.getValue();
+                    int rowIndex = tablet.rowSize++;
+
+                    tablet.addTimestamp(rowIndex, timestamp);
+                    List<MeasurementSchema> measurements = tablet.getSchemas();
+                    for (MeasurementSchema measurement : measurements) {
+                        String tsName =
+                                new StringBuilder()
+                                        .append(device)
+                                        .append(".")
+                                        .append(measurement.getMeasurementId())
+                                        .toString();
+                        int valueIndex = fieldNames.indexOf(tsName) + 1;
+                        Object value;
+                        TSDataType type = measurement.getType();
+                        if (values[valueIndex] != null) {
+                            try {
+                                value = convertType(values[valueIndex], type);
+                            } catch (Exception e) {
+                                getLogger().error("The value {{}} can't be 
converted to the type {{}}", values[valueIndex], type);
+                                processSession.transfer(flowFile, REL_FAILURE);
+                                return;
+                            }
+                        } else
+                            value = null;
+                        tablet.addValue(measurement.getMeasurementId(), 
rowIndex, value);
+                    }
+                    isFulled = tablet.rowSize == tablet.getMaxRowNumber();
+                }
+                if (isFulled) {
+                    if (aligned) {
+                        session.get().insertAlignedTablets(tablets);
+                    } else {
+                        session.get().insertTablets(tablets);
+                    }
+                    tablets.values().forEach(tablet -> tablet.reset());
+                }
+            }
+
+            AtomicBoolean hasRest = new AtomicBoolean(false);
+            tablets.forEach(
+                    (device, tablet) -> {
+                        if (hasRest.get() == false && tablet.rowSize != 0) {
+                            hasRest.set(true);
+                        }
+                    });
+            if (hasRest.get()) {
+                if (aligned) {
+                    session.get().insertAlignedTablets(tablets);
+                } else {
+                    session.get().insertTablets(tablets);
+                }
+            }

Review Comment:
   This `try` block is large and has many error conditions. It would be easier 
to follow and maintain if the various checks could be moved to separate 
methods, which would throw exceptions. That would allow any problems to be 
caught by the outer catch, and result in the FlowFile being routed to failure.



##########
nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/main/java/org/apache/nifi/processors/PutIoTDBRecord.java:
##########
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors;
+
+import java.io.InputStream;
+import java.sql.Timestamp;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Set;
+import java.util.List;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.iotdb.rpc.IoTDBConnectionException;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processors.model.IoTDBSchema;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.write.record.Tablet;
+import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.model.ValidationResult;
+import org.apache.nifi.serialization.RecordReader;
+import org.apache.nifi.serialization.RecordReaderFactory;
+import org.apache.nifi.serialization.record.Record;
+
+@Tags({"iotdb", "insert", "tablet"})
+@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
+@CapabilityDescription(
+        "This is a record aware processor that reads the content of the 
incoming FlowFile as individual records using the "
+                + "configured 'Record Reader' and writes them to Apache IoTDB 
using native interface.")
+public class PutIoTDBRecord extends AbstractIoTDB {
+
+    static final PropertyDescriptor RECORD_READER_FACTORY =
+            new PropertyDescriptor.Builder()
+                    .name("Record Reader")
+                    .description(
+                            "Specifies the type of Record Reader controller 
service to use for parsing the incoming data "
+                                    + "and determining the schema")
+                    .identifiesControllerService(RecordReaderFactory.class)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor SCHEMA =
+            new PropertyDescriptor.Builder()
+                    .name("Schema Template")
+                    .description(
+                            "The Apache IoTDB Schema Template defined using 
JSON.\n" +
+                                    "The Processor will infer the IoTDB Schema 
when this property is not configured.\n" +
+                                    "Besides, you can set encoding type and 
compression type by this method.\n" +
+                                    "If you want to know more detail about 
this, you can browse this link: 
https://iotdb.apache.org/UserGuide/Master/Ecosystem-Integration/NiFi-IoTDB.html")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor PREFIX =
+            new PropertyDescriptor.Builder()
+                    .name("Prefix")
+                    .description(
+                            "The Prefix begin with root. that will be add to 
the tsName in data.\n")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor ALIGNED =
+            new PropertyDescriptor.Builder()
+                    .name("Aligned")
+                    .description("Whether to use the Apache IoTDB Aligned 
Timeseries interface")
+                    .allowableValues("true", "false")
+                    .defaultValue("false")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor MAX_ROW_NUMBER =
+            new PropertyDescriptor.Builder()
+                    .name("Max Row Number")
+                    .description(
+                            "Specifies the max row number of each Apache IoTDB 
Tablet")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    private static final String ROOTPREFIX = "root.";
+
+    @Override
+    protected void init(final ProcessorInitializationContext context) {
+        super.init(context);
+        descriptors.add(RECORD_READER_FACTORY);
+        descriptors.add(SCHEMA);
+        descriptors.add(PREFIX);
+        descriptors.add(ALIGNED);
+        descriptors.add(MAX_ROW_NUMBER);
+    }
+
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return descriptors;
+    }
+
+    @Override
+    public Set<Relationship> getRelationships() {
+        return relationships;
+    }
+
+    @OnScheduled
+    public void onScheduled(final ProcessContext context) throws 
IoTDBConnectionException {
+        super.onScheduled(context);
+    }
+
+    @Override
+    public void onTrigger(ProcessContext processContext, ProcessSession 
processSession)
+            throws ProcessException {
+        final RecordReaderFactory recordParserFactory =
+                processContext
+                        .getProperty(RECORD_READER_FACTORY)
+                        .asControllerService(RecordReaderFactory.class);
+
+        FlowFile flowFile = processSession.get();
+
+        if (flowFile == null) {
+            return;
+        }
+
+        String schemaProperty = 
processContext.getProperty(SCHEMA).evaluateAttributeExpressions(flowFile).getValue();
+        String alignedProperty = 
processContext.getProperty(ALIGNED).evaluateAttributeExpressions(flowFile).getValue();
+        String maxRowNumberProperty = 
processContext.getProperty(MAX_ROW_NUMBER).evaluateAttributeExpressions(flowFile).getValue();
+        String prefix = 
processContext.getProperty(PREFIX).evaluateAttributeExpressions(flowFile).getValue();
+
+        if (!prefix.startsWith(ROOTPREFIX) || !prefix.endsWith(".")) {
+            getLogger().error("The prefix is not begin with root and end with 
.", flowFile);
+            processSession.transfer(flowFile, REL_FAILURE);
+        }
+
+        final boolean aligned = alignedProperty != null ? 
Boolean.valueOf(alignedProperty) : false;
+        int maxRowNumber = maxRowNumberProperty != null ? 
Integer.valueOf(maxRowNumberProperty) : 1024;
+
+        try (final InputStream inputStream = processSession.read(flowFile);
+             final RecordReader recordReader =
+                     recordParserFactory.createRecordReader(flowFile, 
inputStream, getLogger())) {
+            boolean needInitFormatter;
+            IoTDBSchema schema;
+            ValidationResult result;
+
+            result =
+                    schemaProperty != null
+                            ? validateSchemaAttribute(schemaProperty)
+                            : validateSchema(recordReader.getSchema());
+
+            if (!result.getKey()) {
+                getLogger().error("The property `schema` has an error: {}", 
result.getValue());
+                inputStream.close();
+                recordReader.close();
+                processSession.transfer(flowFile, REL_FAILURE);
+                return;
+            } else {
+                if (result.getValue() != null) {
+                    getLogger().warn("The property `schema` has a warn: {}", 
result.getValue());
+                }
+            }
+
+            schema =
+                    schemaProperty != null
+                            ? mapper.readValue(schemaProperty, 
IoTDBSchema.class)
+                            : convertSchema(recordReader.getSchema());
+
+            List<String> fieldNames = schema.getFieldNames(prefix);
+
+            needInitFormatter = schema.getTimeType() != 
IoTDBSchema.TimeType.LONG;
+
+            HashMap<String, Tablet> tablets = generateTablets(schema, prefix, 
maxRowNumber);
+            DateTimeFormatter format = null;
+
+            Record record;
+
+            while ((record = recordReader.nextRecord()) != null) {
+                Object[] values = record.getValues();
+                if (format == null && needInitFormatter) {
+                    format = initFormatter((String) values[0]);
+                    if (format == null) {
+                        getLogger().error("{} Record [{}] time format not 
supported", flowFile, values[0]);
+                        inputStream.close();
+                        recordReader.close();
+                        processSession.transfer(flowFile, REL_FAILURE);
+                        return;
+                    }
+                }
+
+                long timestamp;
+                if (needInitFormatter) {
+                    timestamp = Timestamp
+                            .valueOf(LocalDateTime.parse((String) values[0], 
format))
+                            .getTime();
+                } else {
+                    timestamp = (Long) values[0];
+                }
+
+                boolean isFulled = false;
+
+                for (Map.Entry<String, Tablet> entry : tablets.entrySet()) {
+                    String device = entry.getKey();
+                    Tablet tablet = entry.getValue();
+                    int rowIndex = tablet.rowSize++;
+
+                    tablet.addTimestamp(rowIndex, timestamp);
+                    List<MeasurementSchema> measurements = tablet.getSchemas();
+                    for (MeasurementSchema measurement : measurements) {
+                        String tsName =
+                                new StringBuilder()
+                                        .append(device)
+                                        .append(".")
+                                        .append(measurement.getMeasurementId())
+                                        .toString();
+                        int valueIndex = fieldNames.indexOf(tsName) + 1;
+                        Object value;
+                        TSDataType type = measurement.getType();
+                        if (values[valueIndex] != null) {
+                            try {
+                                value = convertType(values[valueIndex], type);
+                            } catch (Exception e) {
+                                getLogger().error("The value {{}} can't be 
converted to the type {{}}", values[valueIndex], type);
+                                processSession.transfer(flowFile, REL_FAILURE);
+                                return;
+                            }
+                        } else
+                            value = null;
+                        tablet.addValue(measurement.getMeasurementId(), 
rowIndex, value);
+                    }
+                    isFulled = tablet.rowSize == tablet.getMaxRowNumber();
+                }
+                if (isFulled) {
+                    if (aligned) {
+                        session.get().insertAlignedTablets(tablets);
+                    } else {
+                        session.get().insertTablets(tablets);
+                    }
+                    tablets.values().forEach(tablet -> tablet.reset());
+                }
+            }
+
+            AtomicBoolean hasRest = new AtomicBoolean(false);
+            tablets.forEach(
+                    (device, tablet) -> {
+                        if (hasRest.get() == false && tablet.rowSize != 0) {
+                            hasRest.set(true);
+                        }
+                    });
+            if (hasRest.get()) {
+                if (aligned) {
+                    session.get().insertAlignedTablets(tablets);
+                } else {
+                    session.get().insertTablets(tablets);
+                }
+            }
+        } catch (Exception e) {
+            getLogger().error("Processing failed {}", flowFile, e);
+            processSession.transfer(flowFile, REL_FAILURE);
+            return;
+        }
+        processSession.transfer(flowFile, REL_SUCCESS);
+    }
+
    // NOTE(review): pure delegation - this override adds nothing over the
    // inherited implementation and can be removed.
    @Override
    public boolean isStateful(ProcessContext context) {
        return super.isStateful(context);
    }

Review Comment:
   This method can be removed.



##########
nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/main/java/org/apache/nifi/processors/PutIoTDBRecord.java:
##########
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors;
+
+import java.io.InputStream;
+import java.sql.Timestamp;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Set;
+import java.util.List;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.iotdb.rpc.IoTDBConnectionException;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processors.model.IoTDBSchema;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.write.record.Tablet;
+import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.model.ValidationResult;
+import org.apache.nifi.serialization.RecordReader;
+import org.apache.nifi.serialization.RecordReaderFactory;
+import org.apache.nifi.serialization.record.Record;
+
+@Tags({"iotdb", "insert", "tablet"})
+@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
+@CapabilityDescription(
+        "This is a record aware processor that reads the content of the 
incoming FlowFile as individual records using the "
+                + "configured 'Record Reader' and writes them to Apache IoTDB 
using native interface.")
+public class PutIoTDBRecord extends AbstractIoTDB {
+
+    static final PropertyDescriptor RECORD_READER_FACTORY =
+            new PropertyDescriptor.Builder()
+                    .name("Record Reader")
+                    .description(
+                            "Specifies the type of Record Reader controller 
service to use for parsing the incoming data "
+                                    + "and determining the schema")
+                    .identifiesControllerService(RecordReaderFactory.class)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor SCHEMA =
+            new PropertyDescriptor.Builder()
+                    .name("Schema Template")
+                    .description(
+                            "The Apache IoTDB Schema Template defined using 
JSON.\n" +
+                                    "The Processor will infer the IoTDB Schema 
when this property is not configured.\n" +
+                                    "Besides, you can set encoding type and 
compression type by this method.\n" +
+                                    "If you want to know more detail about 
this, you can browse this link: 
https://iotdb.apache.org/UserGuide/Master/Ecosystem-Integration/NiFi-IoTDB.html";)
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor PREFIX =
+            new PropertyDescriptor.Builder()
+                    .name("Prefix")
+                    .description(
+                            "The Prefix begin with root. that will be add to 
the tsName in data.\n")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor ALIGNED =
+            new PropertyDescriptor.Builder()
+                    .name("Aligned")
+                    .description("Whether to use the Apache IoTDB Aligned 
Timeseries interface")
+                    .allowableValues("true", "false")
+                    .defaultValue("false")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor MAX_ROW_NUMBER =
+            new PropertyDescriptor.Builder()
+                    .name("Max Row Number")
+                    .description(
+                            "Specifies the max row number of each Apache IoTDB 
Tablet")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    private static final String ROOTPREFIX = "root.";
+
    @Override
    protected void init(final ProcessorInitializationContext context) {
        // Let AbstractIoTDB register its common properties first, then append the
        // properties specific to this processor to the shared descriptor list.
        super.init(context);
        descriptors.add(RECORD_READER_FACTORY);
        descriptors.add(SCHEMA);
        descriptors.add(PREFIX);
        descriptors.add(ALIGNED);
        descriptors.add(MAX_ROW_NUMBER);
    }
+
    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        // descriptors is the shared field populated in init().
        return descriptors;
    }
+
    @Override
    public Set<Relationship> getRelationships() {
        // relationships is inherited from the parent class; onTrigger routes to
        // REL_SUCCESS / REL_FAILURE, which are presumably registered there.
        return relationships;
    }
+
    // NOTE(review): pure delegation - this override adds nothing and can be
    // removed, provided AbstractIoTDB.onScheduled itself carries @OnScheduled
    // (confirm before deleting).
    @OnScheduled
    public void onScheduled(final ProcessContext context) throws IoTDBConnectionException {
        super.onScheduled(context);
    }
+
+    @Override
+    public void onTrigger(ProcessContext processContext, ProcessSession 
processSession)
+            throws ProcessException {
+        final RecordReaderFactory recordParserFactory =
+                processContext
+                        .getProperty(RECORD_READER_FACTORY)
+                        .asControllerService(RecordReaderFactory.class);
+
+        FlowFile flowFile = processSession.get();
+
+        if (flowFile == null) {
+            return;
+        }
+
+        String schemaProperty = 
processContext.getProperty(SCHEMA).evaluateAttributeExpressions(flowFile).getValue();
+        String alignedProperty = 
processContext.getProperty(ALIGNED).evaluateAttributeExpressions(flowFile).getValue();
+        String maxRowNumberProperty = 
processContext.getProperty(MAX_ROW_NUMBER).evaluateAttributeExpressions(flowFile).getValue();
+        String prefix = 
processContext.getProperty(PREFIX).evaluateAttributeExpressions(flowFile).getValue();
+
+        if (!prefix.startsWith(ROOTPREFIX) || !prefix.endsWith(".")) {
+            getLogger().error("The prefix is not begin with root and end with 
.", flowFile);
+            processSession.transfer(flowFile, REL_FAILURE);
+        }
+
+        final boolean aligned = alignedProperty != null ? 
Boolean.valueOf(alignedProperty) : false;
+        int maxRowNumber = maxRowNumberProperty != null ? 
Integer.valueOf(maxRowNumberProperty) : 1024;
+
+        try (final InputStream inputStream = processSession.read(flowFile);
+             final RecordReader recordReader =
+                     recordParserFactory.createRecordReader(flowFile, 
inputStream, getLogger())) {
+            boolean needInitFormatter;
+            IoTDBSchema schema;
+            ValidationResult result;
+
+            result =
+                    schemaProperty != null
+                            ? validateSchemaAttribute(schemaProperty)
+                            : validateSchema(recordReader.getSchema());
+
+            if (!result.getKey()) {
+                getLogger().error("The property `schema` has an error: {}", 
result.getValue());
+                inputStream.close();
+                recordReader.close();
+                processSession.transfer(flowFile, REL_FAILURE);
+                return;
+            } else {
+                if (result.getValue() != null) {
+                    getLogger().warn("The property `schema` has a warn: {}", 
result.getValue());
+                }
+            }
+
+            schema =
+                    schemaProperty != null
+                            ? mapper.readValue(schemaProperty, 
IoTDBSchema.class)
+                            : convertSchema(recordReader.getSchema());
+
+            List<String> fieldNames = schema.getFieldNames(prefix);
+
+            needInitFormatter = schema.getTimeType() != 
IoTDBSchema.TimeType.LONG;
+
+            HashMap<String, Tablet> tablets = generateTablets(schema, prefix, 
maxRowNumber);
+            DateTimeFormatter format = null;
+
+            Record record;
+
+            while ((record = recordReader.nextRecord()) != null) {
+                Object[] values = record.getValues();
+                if (format == null && needInitFormatter) {
+                    format = initFormatter((String) values[0]);
+                    if (format == null) {
+                        getLogger().error("{} Record [{}] time format not 
supported", flowFile, values[0]);
+                        inputStream.close();
+                        recordReader.close();
+                        processSession.transfer(flowFile, REL_FAILURE);
+                        return;
+                    }
+                }
+
+                long timestamp;
+                if (needInitFormatter) {
+                    timestamp = Timestamp
+                            .valueOf(LocalDateTime.parse((String) values[0], 
format))
+                            .getTime();
+                } else {
+                    timestamp = (Long) values[0];
+                }
+
+                boolean isFulled = false;
+
+                for (Map.Entry<String, Tablet> entry : tablets.entrySet()) {
+                    String device = entry.getKey();
+                    Tablet tablet = entry.getValue();
+                    int rowIndex = tablet.rowSize++;
+
+                    tablet.addTimestamp(rowIndex, timestamp);
+                    List<MeasurementSchema> measurements = tablet.getSchemas();
+                    for (MeasurementSchema measurement : measurements) {
+                        String tsName =
+                                new StringBuilder()
+                                        .append(device)
+                                        .append(".")
+                                        .append(measurement.getMeasurementId())
+                                        .toString();
+                        int valueIndex = fieldNames.indexOf(tsName) + 1;
+                        Object value;
+                        TSDataType type = measurement.getType();
+                        if (values[valueIndex] != null) {
+                            try {
+                                value = convertType(values[valueIndex], type);
+                            } catch (Exception e) {
+                                getLogger().error("The value {{}} can't be 
converted to the type {{}}", values[valueIndex], type);
+                                processSession.transfer(flowFile, REL_FAILURE);
+                                return;
+                            }
+                        } else
+                            value = null;
+                        tablet.addValue(measurement.getMeasurementId(), 
rowIndex, value);
+                    }
+                    isFulled = tablet.rowSize == tablet.getMaxRowNumber();
+                }
+                if (isFulled) {
+                    if (aligned) {
+                        session.get().insertAlignedTablets(tablets);
+                    } else {
+                        session.get().insertTablets(tablets);
+                    }
+                    tablets.values().forEach(tablet -> tablet.reset());
+                }
+            }
+
+            AtomicBoolean hasRest = new AtomicBoolean(false);
+            tablets.forEach(
+                    (device, tablet) -> {
+                        if (hasRest.get() == false && tablet.rowSize != 0) {
+                            hasRest.set(true);
+                        }
+                    });
+            if (hasRest.get()) {
+                if (aligned) {
+                    session.get().insertAlignedTablets(tablets);
+                } else {
+                    session.get().insertTablets(tablets);
+                }
+            }
+        } catch (Exception e) {
+            getLogger().error("Processing failed {}", flowFile, e);
+            processSession.transfer(flowFile, REL_FAILURE);
+            return;
+        }
+        processSession.transfer(flowFile, REL_SUCCESS);
+    }
+
    // NOTE(review): pure delegation - this override adds nothing over the
    // inherited implementation and can be removed.
    @Override
    public boolean isStateful(ProcessContext context) {
        return super.isStateful(context);
    }
+
    // NOTE(review): pure delegation - can be removed, provided AbstractIoTDB.stop
    // itself carries @OnUnscheduled (confirm before deleting).
    @OnUnscheduled
    public void stop(ProcessContext context) {
        super.stop(context);
    }

Review Comment:
   This method can be removed.



##########
nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/main/java/org/apache/nifi/processors/PutIoTDBRecord.java:
##########
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors;
+
+import java.io.InputStream;
+import java.sql.Timestamp;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Set;
+import java.util.List;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.iotdb.rpc.IoTDBConnectionException;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processors.model.IoTDBSchema;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.write.record.Tablet;
+import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.model.ValidationResult;
+import org.apache.nifi.serialization.RecordReader;
+import org.apache.nifi.serialization.RecordReaderFactory;
+import org.apache.nifi.serialization.record.Record;
+
+@Tags({"iotdb", "insert", "tablet"})
+@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
+@CapabilityDescription(
+        "This is a record aware processor that reads the content of the 
incoming FlowFile as individual records using the "
+                + "configured 'Record Reader' and writes them to Apache IoTDB 
using native interface.")
+public class PutIoTDBRecord extends AbstractIoTDB {
+
+    static final PropertyDescriptor RECORD_READER_FACTORY =
+            new PropertyDescriptor.Builder()
+                    .name("Record Reader")
+                    .description(
+                            "Specifies the type of Record Reader controller 
service to use for parsing the incoming data "
+                                    + "and determining the schema")
+                    .identifiesControllerService(RecordReaderFactory.class)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor SCHEMA =
+            new PropertyDescriptor.Builder()
+                    .name("Schema Template")
+                    .description(
+                            "The Apache IoTDB Schema Template defined using 
JSON.\n" +
+                                    "The Processor will infer the IoTDB Schema 
when this property is not configured.\n" +
+                                    "Besides, you can set encoding type and 
compression type by this method.\n" +
+                                    "If you want to know more detail about 
this, you can browse this link: 
https://iotdb.apache.org/UserGuide/Master/Ecosystem-Integration/NiFi-IoTDB.html";)
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor PREFIX =
+            new PropertyDescriptor.Builder()
+                    .name("Prefix")
+                    .description(
+                            "The Prefix begin with root. that will be add to 
the tsName in data.\n")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor ALIGNED =
+            new PropertyDescriptor.Builder()
+                    .name("Aligned")
+                    .description("Whether to use the Apache IoTDB Aligned 
Timeseries interface")
+                    .allowableValues("true", "false")
+                    .defaultValue("false")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor MAX_ROW_NUMBER =
+            new PropertyDescriptor.Builder()
+                    .name("Max Row Number")
+                    .description(
+                            "Specifies the max row number of each Apache IoTDB 
Tablet")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    private static final String ROOTPREFIX = "root.";
+
+    @Override
+    protected void init(final ProcessorInitializationContext context) {
+        super.init(context);
+        descriptors.add(RECORD_READER_FACTORY);
+        descriptors.add(SCHEMA);
+        descriptors.add(PREFIX);
+        descriptors.add(ALIGNED);
+        descriptors.add(MAX_ROW_NUMBER);
+    }
+
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return descriptors;
+    }
+
+    @Override
+    public Set<Relationship> getRelationships() {
+        return relationships;
+    }
+
+    @OnScheduled
+    public void onScheduled(final ProcessContext context) throws 
IoTDBConnectionException {
+        super.onScheduled(context);
+    }

Review Comment:
   This method can be removed.



##########
nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/main/java/org/apache/nifi/processors/PutIoTDBRecord.java:
##########
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors;
+
+import java.io.InputStream;
+import java.sql.Timestamp;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Set;
+import java.util.List;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.iotdb.rpc.IoTDBConnectionException;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processors.model.IoTDBSchema;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.write.record.Tablet;
+import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.model.ValidationResult;
+import org.apache.nifi.serialization.RecordReader;
+import org.apache.nifi.serialization.RecordReaderFactory;
+import org.apache.nifi.serialization.record.Record;
+
+@Tags({"iotdb", "insert", "tablet"})
+@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
+@CapabilityDescription(
+        "This is a record aware processor that reads the content of the 
incoming FlowFile as individual records using the "
+                + "configured 'Record Reader' and writes them to Apache IoTDB 
using native interface.")
+public class PutIoTDBRecord extends AbstractIoTDB {
+
+    static final PropertyDescriptor RECORD_READER_FACTORY =
+            new PropertyDescriptor.Builder()
+                    .name("Record Reader")
+                    .description(
+                            "Specifies the type of Record Reader controller 
service to use for parsing the incoming data "
+                                    + "and determining the schema")
+                    .identifiesControllerService(RecordReaderFactory.class)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor SCHEMA =
+            new PropertyDescriptor.Builder()
+                    .name("Schema Template")
+                    .description(
+                            "The Apache IoTDB Schema Template defined using 
JSON.\n" +
+                                    "The Processor will infer the IoTDB Schema 
when this property is not configured.\n" +
+                                    "Besides, you can set encoding type and 
compression type by this method.\n" +
+                                    "If you want to know more detail about 
this, you can browse this link: 
https://iotdb.apache.org/UserGuide/Master/Ecosystem-Integration/NiFi-IoTDB.html";)
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor PREFIX =
+            new PropertyDescriptor.Builder()
+                    .name("Prefix")
+                    .description(
+                            "The Prefix begin with root. that will be add to 
the tsName in data.\n")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor ALIGNED =
+            new PropertyDescriptor.Builder()
+                    .name("Aligned")
+                    .description("Whether to use the Apache IoTDB Aligned 
Timeseries interface")
+                    .allowableValues("true", "false")
+                    .defaultValue("false")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor MAX_ROW_NUMBER =
+            new PropertyDescriptor.Builder()
+                    .name("Max Row Number")
+                    .description(
+                            "Specifies the max row number of each Apache IoTDB 
Tablet")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    private static final String ROOTPREFIX = "root.";
+
+    @Override
+    protected void init(final ProcessorInitializationContext context) {
+        super.init(context);
+        descriptors.add(RECORD_READER_FACTORY);
+        descriptors.add(SCHEMA);
+        descriptors.add(PREFIX);
+        descriptors.add(ALIGNED);
+        descriptors.add(MAX_ROW_NUMBER);
+    }
+
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return descriptors;
+    }
+
+    @Override
+    public Set<Relationship> getRelationships() {
+        return relationships;
+    }
+
    // NOTE(review): pure delegation - this override adds nothing and can be
    // removed, provided AbstractIoTDB.onScheduled itself carries @OnScheduled
    // (confirm before deleting).
    @OnScheduled
    public void onScheduled(final ProcessContext context) throws IoTDBConnectionException {
        super.onScheduled(context);
    }
+
+    @Override
+    public void onTrigger(ProcessContext processContext, ProcessSession 
processSession)
+            throws ProcessException {
+        final RecordReaderFactory recordParserFactory =
+                processContext
+                        .getProperty(RECORD_READER_FACTORY)
+                        .asControllerService(RecordReaderFactory.class);

Review Comment:
   This should be moved after `processSession.get()`



##########
nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/test/java/org/apache/nifi/processors/AbstractIoTDBUT.java:
##########
@@ -0,0 +1,318 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.nifi.processors.model.IoTDBSchema;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
+import org.apache.iotdb.tsfile.write.record.Tablet;
+import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processors.model.ValidationResult;
+
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeParseException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class AbstractIoTDBUT {
+    private static TestAbstractIoTDBProcessor processor;
+
+    @Before
+    public void init() {
+        // Create a fresh processor for every test to avoid cross-test state leakage.
+        processor = new TestAbstractIoTDBProcessor();
+    }
+
+    @Test
+    public void testValidateSchemaAttribute() {
+        // normal schema
+        String schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+
+        ValidationResult result = 
processor.validateSchemaAttribute(schemaAttribute);
+         assertTrue(result.getKey());
+         assertEquals(null, result.getValue());
+
+        // schema with wrong field
+        schemaAttribute =
+                "{\n"
+                        + "\t\"time\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        String exceptedMsg = "The JSON of schema must contain `timeType` and 
`fields`";
+
+         assertEquals(false, result.getKey());
+         assertEquals(exceptedMsg, result.getValue());
+
+        // schema with wrong time type
+        schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"int\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        exceptedMsg =
+                "Unknown `timeType`: int, there are only two options `LONG` 
and `STRING` for this property";
+
+         assertEquals(false, result.getKey());
+         assertEquals(exceptedMsg, result.getValue());
+
+        // schema without tsName
+        schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        exceptedMsg = "`tsName` or `dataType` has not been set";
+
+        assertEquals(false, result.getKey());
+        assertEquals(exceptedMsg, result.getValue());
+
+        // schema without data type
+        schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        exceptedMsg = "`tsName` or `dataType` has not been set";
+
+        assertEquals(false, result.getKey());
+        assertEquals(exceptedMsg, result.getValue());
+
+        // schema with wrong data type
+        schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        exceptedMsg =
+                "Unknown `dataType`: INT. The supported dataTypes are [FLOAT, 
INT64, INT32, TEXT, DOUBLE, BOOLEAN]";
+
+        assertEquals(false, result.getKey());
+        assertEquals(exceptedMsg, result.getValue());
+
+        // schema with wrong key
+        schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encode\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        exceptedMsg = "Unknown property or properties: [encode]";
+
+        assertEquals(false, result.getKey());
+        assertEquals(exceptedMsg, result.getValue());
+
+        // schema with wrong compression type
+        schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encoding\": \"RLE\",\n"
+                        + "\t\t\"compressionType\": \"ZIP\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\",\n"
+                        + "\t\t\"compressionType\": \"GZIP\"\n"
+                        + "\t}]\n"
+                        + "}";
+
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        exceptedMsg =
+                "Unknown `compressionType`: ZIP, The supported compressionType 
are [LZO, PAA, SDT, UNCOMPRESSED, PLA, LZ4, GZIP, SNAPPY]";
+
+        assertEquals(false, result.getKey());
+        assertEquals(exceptedMsg, result.getValue());
+    }
+
+    @Test
+    public void testParseSchema() {
+        // Paths spanning two devices; parseSchema should group measurement names
+        // by their device path. ("filedNames" was a typo for "fieldNames".)
+        ArrayList<String> fieldNames = new ArrayList<>();
+        fieldNames.add("root.sg1.d1.s1");
+        fieldNames.add("root.sg1.d1.s2");
+        fieldNames.add("root.sg1.d2.s1");
+
+        Map<String, List<String>> deviceMeasurementMap = processor.parseSchema(fieldNames);
+
+        // Build the expected map explicitly instead of using double-brace
+        // initialization (anonymous subclasses that capture the enclosing instance).
+        List<String> d1Measurements = new ArrayList<>();
+        d1Measurements.add("s1");
+        d1Measurements.add("s2");
+        List<String> d2Measurements = new ArrayList<>();
+        d2Measurements.add("s1");
+
+        Map<String, List<String>> expectedMap = new HashMap<>();
+        expectedMap.put("root.sg1.d1", d1Measurements);
+        expectedMap.put("root.sg1.d2", d2Measurements);
+
+        assertEquals(expectedMap, deviceMeasurementMap);
+    }
+
+    @Test
+    public void testGenerateTablet() throws JsonProcessingException {
+        String schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+        IoTDBSchema schema = new ObjectMapper().readValue(schemaAttribute, IoTDBSchema.class);
+        HashMap<String, Tablet> tablets = processor.generateTablets(schema, "root.test_sg.test_d1.", 1);
+
+        ArrayList<MeasurementSchema> schemas = new ArrayList<>();
+        schemas.add(new MeasurementSchema("s1", TSDataType.INT32, TSEncoding.RLE));
+        schemas.add(new MeasurementSchema("s2", TSDataType.DOUBLE, TSEncoding.PLAIN));
+        // ("exceptedTablets" was a typo for "expected"; a single expected Tablet
+        // suffices — no need for a map holding one entry.)
+        Tablet expectedTablet = new Tablet("root.test_sg.test_d1", schemas, 1);
+
+        // Exactly one tablet should be generated, keyed by the device path.
+        assertEquals(1, tablets.size());
+        assertTrue(tablets.containsKey("root.test_sg.test_d1"));
+
+        // Look the actual tablet up once instead of repeating the map access.
+        Tablet actualTablet = tablets.get("root.test_sg.test_d1");
+        assertEquals(expectedTablet.getSchemas(), actualTablet.getSchemas());
+        assertEquals(expectedTablet.getMaxRowNumber(), actualTablet.getMaxRowNumber());
+        assertEquals(expectedTablet.getTimeBytesSize(), actualTablet.getTimeBytesSize());
+        assertEquals(expectedTablet.getTotalValueOccupation(), actualTablet.getTotalValueOccupation());
+        assertEquals(expectedTablet.deviceId, actualTablet.deviceId);
+        assertEquals(expectedTablet.rowSize, actualTablet.rowSize);
+    }
+
+    @Test
+    public void testParseTime() {
+        String time = "2022/08/09 10:50:00.000";
+        for (String format : processor.STRING_TIME_FORMAT) {
+            try {
+                DateTimeFormatter.ofPattern(format).parse(time);
+                System.out.println(format);

Review Comment:
   All `System.out.println()` references should be removed.



##########
nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/test/java/org/apache/nifi/processors/AbstractIoTDBUT.java:
##########
@@ -0,0 +1,318 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.nifi.processors.model.IoTDBSchema;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
+import org.apache.iotdb.tsfile.write.record.Tablet;
+import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processors.model.ValidationResult;
+
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeParseException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class AbstractIoTDBUT {
+    private static TestAbstractIoTDBProcessor processor;
+
+    @Before
+    public void init() {
+        // Create a fresh processor for every test to avoid cross-test state leakage.
+        processor = new TestAbstractIoTDBProcessor();
+    }
+
+    @Test
+    public void testValidateSchemaAttribute() {
+        // normal schema
+        String schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+
+        ValidationResult result = 
processor.validateSchemaAttribute(schemaAttribute);
+         assertTrue(result.getKey());
+         assertEquals(null, result.getValue());
+
+        // schema with wrong field
+        schemaAttribute =
+                "{\n"
+                        + "\t\"time\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        String exceptedMsg = "The JSON of schema must contain `timeType` and 
`fields`";
+
+         assertEquals(false, result.getKey());
+         assertEquals(exceptedMsg, result.getValue());
+
+        // schema with wrong time type
+        schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"int\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        exceptedMsg =
+                "Unknown `timeType`: int, there are only two options `LONG` 
and `STRING` for this property";
+
+         assertEquals(false, result.getKey());
+         assertEquals(exceptedMsg, result.getValue());
+
+        // schema without tsName
+        schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        exceptedMsg = "`tsName` or `dataType` has not been set";
+
+        assertEquals(false, result.getKey());
+        assertEquals(exceptedMsg, result.getValue());
+
+        // schema without data type
+        schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        exceptedMsg = "`tsName` or `dataType` has not been set";
+
+        assertEquals(false, result.getKey());
+        assertEquals(exceptedMsg, result.getValue());
+
+        // schema with wrong data type
+        schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        exceptedMsg =
+                "Unknown `dataType`: INT. The supported dataTypes are [FLOAT, 
INT64, INT32, TEXT, DOUBLE, BOOLEAN]";
+
+        assertEquals(false, result.getKey());
+        assertEquals(exceptedMsg, result.getValue());
+
+        // schema with wrong key
+        schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encode\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        exceptedMsg = "Unknown property or properties: [encode]";
+
+        assertEquals(false, result.getKey());
+        assertEquals(exceptedMsg, result.getValue());
+
+        // schema with wrong compression type
+        schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encoding\": \"RLE\",\n"
+                        + "\t\t\"compressionType\": \"ZIP\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\",\n"
+                        + "\t\t\"compressionType\": \"GZIP\"\n"
+                        + "\t}]\n"
+                        + "}";
+
+        result = processor.validateSchemaAttribute(schemaAttribute);
+        exceptedMsg =
+                "Unknown `compressionType`: ZIP, The supported compressionType 
are [LZO, PAA, SDT, UNCOMPRESSED, PLA, LZ4, GZIP, SNAPPY]";
+
+        assertEquals(false, result.getKey());
+        assertEquals(exceptedMsg, result.getValue());
+    }
+
+    @Test
+    public void testParseSchema() {
+        // Paths spanning two devices; parseSchema should group measurement names
+        // by their device path. ("filedNames" was a typo for "fieldNames".)
+        ArrayList<String> fieldNames = new ArrayList<>();
+        fieldNames.add("root.sg1.d1.s1");
+        fieldNames.add("root.sg1.d1.s2");
+        fieldNames.add("root.sg1.d2.s1");
+
+        Map<String, List<String>> deviceMeasurementMap = processor.parseSchema(fieldNames);
+
+        // Build the expected map explicitly instead of using double-brace
+        // initialization (anonymous subclasses that capture the enclosing instance).
+        List<String> d1Measurements = new ArrayList<>();
+        d1Measurements.add("s1");
+        d1Measurements.add("s2");
+        List<String> d2Measurements = new ArrayList<>();
+        d2Measurements.add("s1");
+
+        Map<String, List<String>> expectedMap = new HashMap<>();
+        expectedMap.put("root.sg1.d1", d1Measurements);
+        expectedMap.put("root.sg1.d2", d2Measurements);
+
+        assertEquals(expectedMap, deviceMeasurementMap);
+    }
+
+    @Test
+    public void testGenerateTablet() throws JsonProcessingException {
+        String schemaAttribute =
+                "{\n"
+                        + "\t\"timeType\": \"LONG\",\n"
+                        + "\t\"fields\": [{\n"
+                        + "\t\t\"tsName\": \"s1\",\n"
+                        + "\t\t\"dataType\": \"INT32\",\n"
+                        + "\t\t\"encoding\": \"RLE\"\n"
+                        + "\t}, {\n"
+                        + "\t\t\"tsName\": \"s2\",\n"
+                        + "\t\t\"dataType\": \"DOUBLE\",\n"
+                        + "\t\t\"encoding\": \"PLAIN\"\n"
+                        + "\t}]\n"
+                        + "}";
+        IoTDBSchema schema = new ObjectMapper().readValue(schemaAttribute, IoTDBSchema.class);
+        HashMap<String, Tablet> tablets = processor.generateTablets(schema, "root.test_sg.test_d1.", 1);
+
+        ArrayList<MeasurementSchema> schemas = new ArrayList<>();
+        schemas.add(new MeasurementSchema("s1", TSDataType.INT32, TSEncoding.RLE));
+        schemas.add(new MeasurementSchema("s2", TSDataType.DOUBLE, TSEncoding.PLAIN));
+        // ("exceptedTablets" was a typo for "expected"; a single expected Tablet
+        // suffices — no need for a map holding one entry.)
+        Tablet expectedTablet = new Tablet("root.test_sg.test_d1", schemas, 1);
+
+        // Exactly one tablet should be generated, keyed by the device path.
+        assertEquals(1, tablets.size());
+        assertTrue(tablets.containsKey("root.test_sg.test_d1"));
+
+        // Look the actual tablet up once instead of repeating the map access.
+        Tablet actualTablet = tablets.get("root.test_sg.test_d1");
+        assertEquals(expectedTablet.getSchemas(), actualTablet.getSchemas());
+        assertEquals(expectedTablet.getMaxRowNumber(), actualTablet.getMaxRowNumber());
+        assertEquals(expectedTablet.getTimeBytesSize(), actualTablet.getTimeBytesSize());
+        assertEquals(expectedTablet.getTotalValueOccupation(), actualTablet.getTotalValueOccupation());
+        assertEquals(expectedTablet.deviceId, actualTablet.deviceId);
+        assertEquals(expectedTablet.rowSize, actualTablet.rowSize);
+    }
+
+    @Test
+    public void testParseTime() {

Review Comment:
   This test method does not assert any behavior.



##########
nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/main/java/org/apache/nifi/processors/PutIoTDBRecord.java:
##########
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors;
+
+import java.io.InputStream;
+import java.sql.Timestamp;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Set;
+import java.util.List;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.iotdb.rpc.IoTDBConnectionException;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processors.model.IoTDBSchema;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.write.record.Tablet;
+import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.model.ValidationResult;
+import org.apache.nifi.serialization.RecordReader;
+import org.apache.nifi.serialization.RecordReaderFactory;
+import org.apache.nifi.serialization.record.Record;
+
+@Tags({"iotdb", "insert", "tablet"})
+@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
+@CapabilityDescription(
+        "This is a record aware processor that reads the content of the 
incoming FlowFile as individual records using the "
+                + "configured 'Record Reader' and writes them to Apache IoTDB 
using native interface.")
+public class PutIoTDBRecord extends AbstractIoTDB {
+
+    static final PropertyDescriptor RECORD_READER_FACTORY =
+            new PropertyDescriptor.Builder()
+                    .name("Record Reader")
+                    .description(
+                            "Specifies the type of Record Reader controller 
service to use for parsing the incoming data "
+                                    + "and determining the schema")
+                    .identifiesControllerService(RecordReaderFactory.class)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor SCHEMA =
+            new PropertyDescriptor.Builder()
+                    .name("Schema Template")
+                    .description(
+                            "The Apache IoTDB Schema Template defined using 
JSON.\n" +
+                                    "The Processor will infer the IoTDB Schema 
when this property is not configured.\n" +
+                                    "Besides, you can set encoding type and 
compression type by this method.\n" +
+                                    "If you want to know more detail about 
this, you can browse this link: 
https://iotdb.apache.org/UserGuide/Master/Ecosystem-Integration/NiFi-IoTDB.html";)
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor PREFIX =
+            new PropertyDescriptor.Builder()
+                    .name("Prefix")
+                    .description(
+                            "The Prefix begin with root. that will be add to 
the tsName in data.\n")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor ALIGNED =
+            new PropertyDescriptor.Builder()
+                    .name("Aligned")
+                    .description("Whether to use the Apache IoTDB Aligned 
Timeseries interface")
+                    .allowableValues("true", "false")
+                    .defaultValue("false")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor MAX_ROW_NUMBER =
+            new PropertyDescriptor.Builder()
+                    .name("Max Row Number")
+                    .description(
+                            "Specifies the max row number of each Apache IoTDB 
Tablet")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    private static final String ROOTPREFIX = "root.";
+
+    @Override
+    protected void init(final ProcessorInitializationContext context) {
+        super.init(context);
+        descriptors.add(RECORD_READER_FACTORY);
+        descriptors.add(SCHEMA);
+        descriptors.add(PREFIX);
+        descriptors.add(ALIGNED);
+        descriptors.add(MAX_ROW_NUMBER);
+    }
+
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return descriptors;
+    }
+
+    @Override
+    public Set<Relationship> getRelationships() {
+        return relationships;
+    }
+
    // Delegates to AbstractIoTDB to set up the IoTDB session when the processor
    // is scheduled. NOTE(review): this override adds no behavior beyond the
    // parent call; presumably it exists so the @OnScheduled annotation is
    // declared on this class — confirm whether the parent's lifecycle
    // annotation alone would suffice.
    @OnScheduled
    public void onScheduled(final ProcessContext context) throws IoTDBConnectionException {
        super.onScheduled(context);
    }
+
+    @Override
+    public void onTrigger(ProcessContext processContext, ProcessSession 
processSession)
+            throws ProcessException {
+        final RecordReaderFactory recordParserFactory =
+                processContext
+                        .getProperty(RECORD_READER_FACTORY)
+                        .asControllerService(RecordReaderFactory.class);
+
+        FlowFile flowFile = processSession.get();
+
+        if (flowFile == null) {
+            return;
+        }
+
+        String schemaProperty = 
processContext.getProperty(SCHEMA).evaluateAttributeExpressions(flowFile).getValue();
+        String alignedProperty = 
processContext.getProperty(ALIGNED).evaluateAttributeExpressions(flowFile).getValue();
+        String maxRowNumberProperty = 
processContext.getProperty(MAX_ROW_NUMBER).evaluateAttributeExpressions(flowFile).getValue();
+        String prefix = 
processContext.getProperty(PREFIX).evaluateAttributeExpressions(flowFile).getValue();
+
+        if (!prefix.startsWith(ROOTPREFIX) || !prefix.endsWith(".")) {
+            getLogger().error("The prefix is not begin with root and end with 
.", flowFile);
+            processSession.transfer(flowFile, REL_FAILURE);
+        }
+
+        final boolean aligned = alignedProperty != null ? 
Boolean.valueOf(alignedProperty) : false;
+        int maxRowNumber = maxRowNumberProperty != null ? 
Integer.valueOf(maxRowNumberProperty) : 1024;
+
+        try (final InputStream inputStream = processSession.read(flowFile);
+             final RecordReader recordReader =
+                     recordParserFactory.createRecordReader(flowFile, 
inputStream, getLogger())) {
+            boolean needInitFormatter;
+            IoTDBSchema schema;
+            ValidationResult result;
+
+            result =
+                    schemaProperty != null
+                            ? validateSchemaAttribute(schemaProperty)
+                            : validateSchema(recordReader.getSchema());
+
+            if (!result.getKey()) {
+                getLogger().error("The property `schema` has an error: {}", 
result.getValue());
+                inputStream.close();
+                recordReader.close();
+                processSession.transfer(flowFile, REL_FAILURE);
+                return;
+            } else {
+                if (result.getValue() != null) {
+                    getLogger().warn("The property `schema` has a warn: {}", 
result.getValue());
+                }
+            }
+
+            schema =
+                    schemaProperty != null
+                            ? mapper.readValue(schemaProperty, 
IoTDBSchema.class)
+                            : convertSchema(recordReader.getSchema());
+
+            List<String> fieldNames = schema.getFieldNames(prefix);
+
+            needInitFormatter = schema.getTimeType() != 
IoTDBSchema.TimeType.LONG;
+
+            HashMap<String, Tablet> tablets = generateTablets(schema, prefix, 
maxRowNumber);
+            DateTimeFormatter format = null;
+
+            Record record;
+
+            while ((record = recordReader.nextRecord()) != null) {
+                Object[] values = record.getValues();
+                if (format == null && needInitFormatter) {
+                    format = initFormatter((String) values[0]);
+                    if (format == null) {
+                        getLogger().error("{} Record [{}] time format not 
supported", flowFile, values[0]);
+                        inputStream.close();
+                        recordReader.close();
+                        processSession.transfer(flowFile, REL_FAILURE);
+                        return;
+                    }
+                }
+
+                long timestamp;
+                if (needInitFormatter) {
+                    timestamp = Timestamp
+                            .valueOf(LocalDateTime.parse((String) values[0], 
format))
+                            .getTime();
+                } else {
+                    timestamp = (Long) values[0];
+                }
+
+                boolean isFulled = false;

Review Comment:
   It seems like this should be renamed to `filled`.
   ```suggestion
                   boolean filled = false;
   ```



##########
nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/main/java/org/apache/nifi/processors/PutIoTDBRecord.java:
##########
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.processors;
+
+import java.io.InputStream;
+import java.sql.Timestamp;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Set;
+import java.util.List;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.iotdb.rpc.IoTDBConnectionException;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processors.model.IoTDBSchema;
+import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
+import org.apache.iotdb.tsfile.write.record.Tablet;
+import org.apache.iotdb.tsfile.write.schema.MeasurementSchema;
+import org.apache.nifi.annotation.behavior.InputRequirement;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.annotation.lifecycle.OnUnscheduled;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.model.ValidationResult;
+import org.apache.nifi.serialization.RecordReader;
+import org.apache.nifi.serialization.RecordReaderFactory;
+import org.apache.nifi.serialization.record.Record;
+
+@Tags({"iotdb", "insert", "tablet"})
+@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
+@CapabilityDescription(
+        "This is a record aware processor that reads the content of the 
incoming FlowFile as individual records using the "
+                + "configured 'Record Reader' and writes them to Apache IoTDB 
using native interface.")
+public class PutIoTDBRecord extends AbstractIoTDB {
+
+    static final PropertyDescriptor RECORD_READER_FACTORY =
+            new PropertyDescriptor.Builder()
+                    .name("Record Reader")
+                    .description(
+                            "Specifies the type of Record Reader controller 
service to use for parsing the incoming data "
+                                    + "and determining the schema")
+                    .identifiesControllerService(RecordReaderFactory.class)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor SCHEMA =
+            new PropertyDescriptor.Builder()
+                    .name("Schema Template")
+                    .description(
+                            "The Apache IoTDB Schema Template defined using 
JSON.\n" +
+                                    "The Processor will infer the IoTDB Schema 
when this property is not configured.\n" +
+                                    "Besides, you can set encoding type and 
compression type by this method.\n" +
+                                    "If you want to know more detail about 
this, you can browse this link: 
https://iotdb.apache.org/UserGuide/Master/Ecosystem-Integration/NiFi-IoTDB.html";)
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor PREFIX =
+            new PropertyDescriptor.Builder()
+                    .name("Prefix")
+                    .description(
+                            "The Prefix begin with root. that will be add to 
the tsName in data.\n")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(true)
+                    .build();
+
+    static final PropertyDescriptor ALIGNED =
+            new PropertyDescriptor.Builder()
+                    .name("Aligned")
+                    .description("Whether to use the Apache IoTDB Aligned 
Timeseries interface")
+                    .allowableValues("true", "false")
+                    .defaultValue("false")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    static final PropertyDescriptor MAX_ROW_NUMBER =
+            new PropertyDescriptor.Builder()
+                    .name("Max Row Number")
+                    .description(
+                            "Specifies the max row number of each Apache IoTDB 
Tablet")
+                    
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
+                    .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
+                    .required(false)
+                    .build();
+
+    private static final String ROOTPREFIX = "root.";
+
+    @Override
+    protected void init(final ProcessorInitializationContext context) {
+        super.init(context);
+        descriptors.add(RECORD_READER_FACTORY);
+        descriptors.add(SCHEMA);
+        descriptors.add(PREFIX);
+        descriptors.add(ALIGNED);
+        descriptors.add(MAX_ROW_NUMBER);
+    }
+
+    @Override
+    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
+        return descriptors;
+    }
+
+    @Override
+    public Set<Relationship> getRelationships() {
+        return relationships;
+    }
+
    // Delegates to AbstractIoTDB to set up the IoTDB session when the processor
    // is scheduled. NOTE(review): this override adds no behavior beyond the
    // parent call; presumably it exists so the @OnScheduled annotation is
    // declared on this class — confirm whether the parent's lifecycle
    // annotation alone would suffice.
    @OnScheduled
    public void onScheduled(final ProcessContext context) throws IoTDBConnectionException {
        super.onScheduled(context);
    }
+
+    @Override
+    public void onTrigger(ProcessContext processContext, ProcessSession 
processSession)
+            throws ProcessException {
+        final RecordReaderFactory recordParserFactory =
+                processContext
+                        .getProperty(RECORD_READER_FACTORY)
+                        .asControllerService(RecordReaderFactory.class);
+
+        FlowFile flowFile = processSession.get();
+
+        if (flowFile == null) {
+            return;
+        }
+
+        String schemaProperty = 
processContext.getProperty(SCHEMA).evaluateAttributeExpressions(flowFile).getValue();
+        String alignedProperty = 
processContext.getProperty(ALIGNED).evaluateAttributeExpressions(flowFile).getValue();
+        String maxRowNumberProperty = 
processContext.getProperty(MAX_ROW_NUMBER).evaluateAttributeExpressions(flowFile).getValue();
+        String prefix = 
processContext.getProperty(PREFIX).evaluateAttributeExpressions(flowFile).getValue();
+
+        if (!prefix.startsWith(ROOTPREFIX) || !prefix.endsWith(".")) {
+            getLogger().error("The prefix is not begin with root and end with 
.", flowFile);
+            processSession.transfer(flowFile, REL_FAILURE);
+        }
+
+        final boolean aligned = alignedProperty != null ? 
Boolean.valueOf(alignedProperty) : false;
+        int maxRowNumber = maxRowNumberProperty != null ? 
Integer.valueOf(maxRowNumberProperty) : 1024;
+
+        try (final InputStream inputStream = processSession.read(flowFile);
+             final RecordReader recordReader =
+                     recordParserFactory.createRecordReader(flowFile, 
inputStream, getLogger())) {
+            boolean needInitFormatter;
+            IoTDBSchema schema;
+            ValidationResult result;
+
+            result =
+                    schemaProperty != null
+                            ? validateSchemaAttribute(schemaProperty)
+                            : validateSchema(recordReader.getSchema());
+
+            if (!result.getKey()) {
+                getLogger().error("The property `schema` has an error: {}", 
result.getValue());
+                inputStream.close();
+                recordReader.close();
+                processSession.transfer(flowFile, REL_FAILURE);
+                return;
+            } else {
+                if (result.getValue() != null) {
+                    getLogger().warn("The property `schema` has a warn: {}", 
result.getValue());
+                }
+            }
+
+            schema =
+                    schemaProperty != null
+                            ? mapper.readValue(schemaProperty, 
IoTDBSchema.class)
+                            : convertSchema(recordReader.getSchema());
+
+            List<String> fieldNames = schema.getFieldNames(prefix);
+
+            needInitFormatter = schema.getTimeType() != 
IoTDBSchema.TimeType.LONG;
+
+            HashMap<String, Tablet> tablets = generateTablets(schema, prefix, 
maxRowNumber);
+            DateTimeFormatter format = null;
+
+            Record record;
+
+            while ((record = recordReader.nextRecord()) != null) {
+                Object[] values = record.getValues();
+                if (format == null && needInitFormatter) {
+                    format = initFormatter((String) values[0]);

Review Comment:
   This behavior requires that the first value element is a timestamp, but that 
is not necessarily guaranteed for incoming records. It seems like there needs 
to be some type of transformation strategy that allows the time field to be 
configurable.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to