cgivre commented on a change in pull request #1530: DRILL-6582: SYSLOG 
(RFC-5424) Format Plugin
URL: https://github.com/apache/drill/pull/1530#discussion_r249632568
 
 

 ##########
 File path: 
contrib/format-syslog/src/main/java/org/apache/drill/exec/store/syslog/SyslogRecordReader.java
 ##########
 @@ -0,0 +1,395 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.store.syslog;
+
+import com.google.common.base.Charsets;
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.exec.exception.OutOfMemoryException;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.ops.OperatorContext;
+import org.apache.drill.exec.physical.impl.OutputMutator;
+import org.apache.drill.exec.store.AbstractRecordReader;
+import org.apache.drill.exec.store.dfs.DrillFileSystem;
+import org.apache.drill.exec.store.dfs.easy.FileWork;
+import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter;
+import org.apache.hadoop.fs.Path;
+import org.realityforge.jsyslog.message.StructuredDataParameter;
+import org.realityforge.jsyslog.message.SyslogMessage;
+
+import java.io.BufferedReader;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.text.SimpleDateFormat;
+import java.util.List;
+import java.util.Map;
+import java.util.Iterator;
+
+public class SyslogRecordReader extends AbstractRecordReader {
+
  // SLF4J logger for this reader; also used as the sink for UserException.build().
  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SyslogRecordReader.class);
  // Upper bound of records emitted per call to next().
  // NOTE(review): 8096 looks like a typo for the usual power-of-two 8192 — confirm intent.
  private static final int MAX_RECORDS_PER_BATCH = 8096;

  private final DrillFileSystem fileSystem;   // filesystem used to open the input split
  private final FileWork fileWork;            // describes the file (path) this reader consumes
  private final String userName;              // query user, reported in error context
  private BufferedReader reader;              // line reader over the open file; created in openFile()
  private DrillBuf buffer;                    // managed buffer allocated in the constructor (4096 bytes)
  private VectorContainerWriter writer;       // writes parsed records into value vectors
  private SyslogFormatConfig config;          // plugin configuration for this format
  private int maxErrors;                      // parse-error threshold before the query fails
  private boolean flattenStructuredData;      // whether RFC-5424 structured data is flattened into columns
  private int errorCount;                     // running count of lines that failed to parse
  private int lineCount;                      // 1-based number of the line currently being read
  private List<SchemaPath> projectedColumns;  // columns requested by the query
  private String line;                        // last non-empty line read (kept for error reporting)

  // Formatter for syslog timestamps.
  // NOTE(review): SimpleDateFormat is not thread-safe — presumably each reader
  // instance is used by a single fragment thread; confirm before sharing.
  private SimpleDateFormat df;
+
  /**
   * Creates a record reader for one syslog (RFC-5424) file split.
   *
   * @param context    fragment context supplying the managed buffer allocator
   * @param fileSystem filesystem used to open the file
   * @param fileWork   the file split to read
   * @param columns    columns projected by the query
   * @param userName   query user, included in error context
   * @param config     format-plugin configuration (max errors, flatten flag)
   * @throws OutOfMemoryException if the managed buffer cannot be allocated
   */
  public SyslogRecordReader(FragmentContext context,
                            DrillFileSystem fileSystem,
                            FileWork fileWork,
                            List<SchemaPath> columns,
                            String userName,
                            SyslogFormatConfig config) throws OutOfMemoryException {

    this.fileSystem = fileSystem;
    this.fileWork = fileWork;
    this.userName = userName;
    this.config = config;
    this.maxErrors = config.getMaxErrors();
    // NOTE(review): getValidDateObject is defined elsewhere in this class —
    // presumably it validates the pattern and returns a SimpleDateFormat; confirm.
    this.df = getValidDateObject("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
    this.errorCount = 0;
    this.buffer = context.getManagedBuffer(4096);
    this.projectedColumns = columns;
    this.flattenStructuredData = config.getFlattenStructuredData();


    // Registers the projection with AbstractRecordReader (drives isStarQuery()).
    setColumns(columns);
  }
+
+  public void setup(final OperatorContext context, final OutputMutator output) 
throws ExecutionSetupException {
+    openFile();
+    this.writer = new VectorContainerWriter(output);
+  }
+
+  private void openFile() {
+    InputStream in;
+    try {
+      in = fileSystem.open(new Path(fileWork.getPath()));
+    } catch (Exception e) {
+      throw UserException
+          .dataReadError(e)
+          .message("Failed to open open input file: %s", fileWork.getPath())
+          .addContext("User name", this.userName)
+          .build(logger);
+    }
+    this.lineCount = 0;
+    reader = new BufferedReader(new InputStreamReader(in, Charsets.UTF_8));
+  }
+
+  public int next() {
+    this.writer.allocate();
+    this.writer.reset();
+
+    int recordCount = 0;
+
+    try {
+      BaseWriter.MapWriter map = this.writer.rootAsMap();
+      String line = null;
+
+      while (recordCount < MAX_RECORDS_PER_BATCH && (line = 
this.reader.readLine()) != null) {
+        lineCount++;
+
+        // Skip empty lines
+        line = line.trim();
+        if (line.length() == 0) {
+          continue;
+        }
+        this.line = line;
+
+
+        try {
+          SyslogMessage parsedMessage = 
SyslogMessage.parseStructuredSyslogMessage(line);
+
+          this.writer.setPosition(recordCount);
+          map.start();
+
+          if (isStarQuery()) {
+            writeAllColumns(map, parsedMessage);
+          } else {
+            writeProjectedColumns(map, parsedMessage);
+          }
+          map.end();
+          recordCount++;
+
+        } catch (Exception e) {
+          errorCount++;
+          if (errorCount > maxErrors) {
+            throw UserException
+                .dataReadError()
+                .message("Maximum Error Threshold Exceeded: ")
+                .addContext("Line: " + lineCount)
+                .addContext(e.getMessage())
+                .build(logger);
+          }
+        }
+      }
+
+      this.writer.setValueCount(recordCount);
+      return recordCount;
+
+    } catch (final Exception e) {
+      errorCount++;
+      if (errorCount > maxErrors) {
+        throw UserException.dataReadError()
+            .message("Error parsing file")
+            .addContext(e.getMessage())
+            .build(logger);
+      }
+    }
+
+    return recordCount;
+  }
+
+  private void writeAllColumns(BaseWriter.MapWriter map, SyslogMessage 
parsedMessage) {
+    long milliseconds = parsedMessage.getTimestamp().getMillis(); //TODO put 
in try/catch
 
 Review comment:
   It is theoretically possible that the dates in the Syslog file are not 
parsable or otherwise invalid, in which case that method will throw an exception.  
Added exception handling for this case.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services

Reply via email to