paul-rogers commented on a change in pull request #1114: Drill-6104: Added Logfile Reader
URL: https://github.com/apache/drill/pull/1114#discussion_r200864565
 
 

 ##########
 File path: exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogRecordReader.java
 ##########
 @@ -0,0 +1,750 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.log;
+
+import com.google.common.base.Charsets;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.ops.OperatorContext;
+import org.apache.drill.exec.physical.impl.OutputMutator;
+import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.store.AbstractRecordReader;
+import org.apache.drill.exec.store.dfs.DrillFileSystem;
+import org.apache.drill.exec.store.dfs.easy.FileWork;
+import org.apache.drill.exec.vector.NullableIntVector;
+import org.apache.drill.exec.vector.NullableBigIntVector;
+import org.apache.drill.exec.vector.NullableSmallIntVector;
+import org.apache.drill.exec.vector.NullableFloat4Vector;
+import org.apache.drill.exec.vector.NullableFloat8Vector;
+import org.apache.drill.exec.vector.BaseValueVector;
+import org.apache.drill.exec.vector.NullableDateVector;
+import org.apache.drill.exec.vector.NullableVarCharVector;
+import org.apache.drill.exec.vector.NullableTimeStampVector;
+import org.apache.drill.exec.vector.NullableTimeVector;
+
+import org.apache.hadoop.fs.Path;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+
+public class LogRecordReader extends AbstractRecordReader {
+
+  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(LogRecordReader.class);
+
+  private abstract static class ColumnDefn {
+    public final String name;
+    public final int index;
+    public final String format;
+
+    public ColumnDefn(String name, int index) {
+      this.name = name;
+      this.index = index;
+      this.format = "";
+    }
+
+    public ColumnDefn(String name, int index, String format) {
+      this.name = name;
+      this.index = index;
+      this.format = format;
+    }
+
+    public abstract void define(OutputMutator outputMutator) throws SchemaChangeException;
+
+    public abstract void load(int rowIndex, String value);
+
+    @Override
+    //For testing
+    public String toString() {
+      return "Name: " + name + ", Index: " + index;
+    }
+  }
+
+  private static class VarCharDefn extends ColumnDefn {
+
+    private NullableVarCharVector.Mutator mutator;
+
+    public VarCharDefn(String name, int index) {
+      super(name, index);
+    }
+
+    @Override
+    public void define(OutputMutator outputMutator) throws SchemaChangeException {
+      MaterializedField field = MaterializedField.create(name,
+          Types.optional(MinorType.VARCHAR));
+      mutator = outputMutator.addField(field, NullableVarCharVector.class).getMutator();
+    }
+
+    @Override
+    public void load(int rowIndex, String value) {
+      mutator.set(rowIndex, value.getBytes());
+    }
+  }
+
+  private static class BigIntDefn extends ColumnDefn {
+
+    private NullableBigIntVector.Mutator mutator;
+
+    public BigIntDefn(String name, int index) {
+      super(name, index);
+    }
+
+    @Override
+    public void define(OutputMutator outputMutator) throws SchemaChangeException {
+      MaterializedField field = MaterializedField.create(name,
+          Types.optional(MinorType.BIGINT));
+      mutator = outputMutator.addField(field, NullableBigIntVector.class).getMutator();
+    }
+
+    @Override
+    public void load(int rowIndex, String value) {
+      try {
+        mutator.set(rowIndex, Long.parseLong(value));
+      } catch (NumberFormatException e) {
+        throw UserException
+            .dataReadError(e)
+            .addContext("Failed to parse an INT field")
+            .addContext("Column", name)
+            .addContext("Position", index)
+            .addContext("Value", value)
+            .build(logger);
+      }
+    }
+  }
+
+  private static class SmallIntDefn extends ColumnDefn {
+
+    private NullableSmallIntVector.Mutator mutator;
+
+    public SmallIntDefn(String name, int index) {
+      super(name, index);
+    }
+
+    @Override
+    public void define(OutputMutator outputMutator) throws SchemaChangeException {
+      MaterializedField field = MaterializedField.create(name,
+          Types.optional(MinorType.SMALLINT));
+      mutator = outputMutator.addField(field, NullableSmallIntVector.class).getMutator();
+    }
+
+    @Override
+    public void load(int rowIndex, String value) {
+      try {
+        mutator.set(rowIndex, Integer.parseInt(value));
+      } catch (NumberFormatException e) {
+        throw UserException
+            .dataReadError(e)
+            .addContext("Failed to parse an INT field")
+            .addContext("Column", name)
+            .addContext("Position", index)
+            .addContext("Value", value)
+            .build(logger);
+      }
+    }
+  }
+
+  private static class IntDefn extends ColumnDefn {
+
+    private NullableIntVector.Mutator mutator;
+
+    public IntDefn(String name, int index) {
+      super(name, index);
+    }
+
+    @Override
+    public void define(OutputMutator outputMutator) throws SchemaChangeException {
+      MaterializedField field = MaterializedField.create(name,
+          Types.optional(MinorType.INT));
+      mutator = outputMutator.addField(field, NullableIntVector.class).getMutator();
+    }
+
+    @Override
+    public void load(int rowIndex, String value) {
+      try {
+        mutator.set(rowIndex, Integer.parseInt(value));
+      } catch (NumberFormatException e) {
+        throw UserException
+            .dataReadError(e)
+            .addContext("Failed to parse an INT field")
+            .addContext("Column", name)
+            .addContext("Position", index)
+            .addContext("Value", value)
+            .build(logger);
+      }
+    }
+  }
+
+  private static class Float4Defn extends ColumnDefn {
+
+    private NullableFloat4Vector.Mutator mutator;
+
+    public Float4Defn(String name, int index) {
+      super(name, index);
+    }
+
+    @Override
+    public void define(OutputMutator outputMutator) throws SchemaChangeException {
+      MaterializedField field = MaterializedField.create(name,
+          Types.optional(MinorType.FLOAT4));
+      mutator = outputMutator.addField(field, NullableFloat4Vector.class).getMutator();
+    }
+
+    @Override
+    public void load(int rowIndex, String value) {
+      try {
+        mutator.set(rowIndex, Float.parseFloat(value));
+      } catch (NumberFormatException e) {
+        throw UserException
+            .dataReadError(e)
+            .addContext("Failed to parse an FLOAT field")
+            .addContext("Column", name)
+            .addContext("Position", index)
+            .addContext("Value", value)
+            .build(logger);
+      }
+    }
+  }
+
+  private static class DoubleDefn extends ColumnDefn {
+
+    private NullableFloat8Vector.Mutator mutator;
+
+    public DoubleDefn(String name, int index) {
+      super(name, index);
+    }
+
+    @Override
+    public void define(OutputMutator outputMutator) throws SchemaChangeException {
+      MaterializedField field = MaterializedField.create(name,
+          Types.optional(MinorType.FLOAT8));
+      mutator = outputMutator.addField(field, NullableFloat8Vector.class).getMutator();
+    }
+
+    @Override
+    public void load(int rowIndex, String value) {
+      try {
+        mutator.set(rowIndex, Double.parseDouble(value));
+      } catch (NumberFormatException e) {
+        throw UserException
+            .dataReadError(e)
+            .addContext("Failed to parse an FLOAT field")
+            .addContext("Column", name)
+            .addContext("Position", index)
+            .addContext("Value", value)
+            .build(logger);
+      }
+    }
+  }
+
+  private static class DateDefn extends ColumnDefn {
+
+    private NullableDateVector.Mutator mutator;
+    private SimpleDateFormat df;
+
+    public DateDefn(String name, int index, String dateFormat) {
+      super(name, index, dateFormat);
+      df = getValidDateObject(dateFormat);
+    }
+
+    private SimpleDateFormat getValidDateObject(String d) {
+      SimpleDateFormat tempDateFormat;
+      if (d != null && !d.isEmpty()) {
+        tempDateFormat = new SimpleDateFormat(d);
+      } else {
+        throw UserException.parseError()
+            .message("Invalid date format.  The date formatting string was 
empty.")
+            .build(logger);
+      }
+      return tempDateFormat;
+    }
+
+    @Override
+    public void define(OutputMutator outputMutator) throws SchemaChangeException {
+      MaterializedField field = MaterializedField.create(name,
+          Types.optional(MinorType.DATE));
+      mutator = outputMutator.addField(field, NullableDateVector.class).getMutator();
+    }
+
+    @Override
+    public void load(int rowIndex, String value) {
+      try {
+        Date d = df.parse(value);
+        long milliseconds = d.getTime();
+        mutator.set(rowIndex, milliseconds);
+      } catch (NumberFormatException e) {
+        throw UserException
+            .dataReadError(e)
+            .addContext("Failed to parse an DATE field")
+            .addContext("Column", name)
+            .addContext("Position", index)
+            .addContext("Value", value)
+            .build(logger);
+      } catch (ParseException e) {
+        throw UserException
+            .dataReadError(e)
+            .addContext("Date Format String does not match field value.")
+            .addContext("Column", name)
+            .addContext("Position", index)
+            .addContext("Format String", format)
+            .addContext("Value", value)
+            .build(logger);
+      }
+    }
+  }
+
+  private static class TimeDefn extends ColumnDefn {
+
+    private NullableTimeVector.Mutator mutator;
+    private SimpleDateFormat df;
+
+    public TimeDefn(String name, int index, String dateFormat) {
+      super(name, index, dateFormat);
+      df = getValidDateObject(dateFormat);
+    }
+
+    private SimpleDateFormat getValidDateObject(String d) {
+      SimpleDateFormat tempDateFormat;
+      if (d != null && !d.isEmpty()) {
+        tempDateFormat = new SimpleDateFormat(d);
+      } else {
+        throw UserException.parseError()
+            .message("Invalid date format.  The date formatting string was 
empty.")
+            .build(logger);
+      }
+      return tempDateFormat;
+    }
+
+    @Override
+    public void define(OutputMutator outputMutator) throws SchemaChangeException {
+      MaterializedField field = MaterializedField.create(name,
+          Types.optional(MinorType.TIME));
+      mutator = outputMutator.addField(field, NullableTimeVector.class).getMutator();
+    }
+
+    @Override
+    public void load(int rowIndex, String value) {
+      try {
+        Date d = df.parse(value);
+        int milliseconds = (int) d.getTime();
+        mutator.set(rowIndex, milliseconds);
+      } catch (NumberFormatException e) {
+        throw UserException
+            .dataReadError(e)
+            .addContext("Failed to parse an Time field")
+            .addContext("Column", name)
+            .addContext("Position", index)
+            .addContext("Value", value)
+            .build(logger);
+      } catch (ParseException e) {
+        throw UserException
+            .dataReadError(e)
+            .addContext("Date Format String does not match field value.")
+            .addContext("Column", name)
+            .addContext("Position", index)
+            .addContext("Format String", format)
+            .addContext("Value", value)
+            .build(logger);
+      }
+    }
+  }
+
+  private static class TimeStampDefn extends ColumnDefn {
+
+    private NullableTimeStampVector.Mutator mutator;
+    private SimpleDateFormat df;
+
+    public TimeStampDefn(String name, int index, String dateFormat) {
+      super(name, index, dateFormat);
+      df = getValidDateObject(dateFormat);
+    }
+
+    private SimpleDateFormat getValidDateObject(String d) {
+      SimpleDateFormat tempDateFormat;
+      if (d != null && !d.isEmpty()) {
+        tempDateFormat = new SimpleDateFormat(d);
+      } else {
+        throw UserException.parseError()
+            .message("Invalid date format.  The date formatting string was 
empty.")
+            .build(logger);
+      }
+      return tempDateFormat;
+    }
+
+    @Override
+    public void define(OutputMutator outputMutator) throws SchemaChangeException {
+      MaterializedField field = MaterializedField.create(name,
+          Types.optional(MinorType.TIMESTAMP));
+      mutator = outputMutator.addField(field, NullableTimeStampVector.class).getMutator();
+    }
+
+    @Override
+    public void load(int rowIndex, String value) {
+      try {
+        Date d = df.parse(value);
+        long milliseconds = d.getTime();
+        mutator.set(rowIndex, milliseconds);
+      } catch (NumberFormatException e) {
+        throw UserException
+            .dataReadError(e)
+            .addContext("Failed to parse a TIMESTAMP field")
+            .addContext("Column", name)
+            .addContext("Position", index)
+            .addContext("Value", value)
+            .build(logger);
+      } catch (ParseException e) {
+        throw UserException
+            .dataReadError(e)
+            .addContext("Date Format String does not match field value.")
+            .addContext("Column", name)
+            .addContext("Position", index)
+            .addContext("Format String", format)
+            .addContext("Value", value)
+            .build(logger);
+      }
+    }
+  }
+
+  private static final int BATCH_SIZE = BaseValueVector.INITIAL_VALUE_ALLOCATION;
+
+  private final DrillFileSystem dfs;
+  private final FileWork fileWork;
+  private final String userName;
+  private final LogFormatConfig formatConfig;
+  private ColumnDefn columns[];
+  private Pattern pattern;
+  private BufferedReader reader;
+  private int rowIndex;
+  private int capturingGroups;
+  private OutputMutator outputMutator;
+  private int unmatchedColumnIndex;
+  private int unmatchedRowIndex;
+  private boolean unmatchedRows;
+
+  private int errorCount;
+
+  public LogRecordReader(FragmentContext context, DrillFileSystem dfs,
+                         FileWork fileWork, List<SchemaPath> columns, String userName,
+                         LogFormatConfig formatConfig) {
+    this.dfs = dfs;
+    this.fileWork = fileWork;
+    this.userName = userName;
+    this.formatConfig = formatConfig;
+    this.unmatchedColumnIndex = -1;
+    this.unmatchedRowIndex = 0;
+    this.unmatchedRows = false;
+
+    // Ask the superclass to parse the projection list.
+    setColumns(columns);
+  }
+
+  @Override
+  public void setup(final OperatorContext context, final OutputMutator output) {
+    this.outputMutator = output;
+
+    setupPattern();
+    openFile();
+    setupProjection();
+    defineVectors();
+  }
+
+  private void setupPattern() {
+    try {
+      this.pattern = Pattern.compile(this.formatConfig.getRegex());
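+      // Create a matcher on a throwaway string only to read groupCount(), which
+      // depends solely on the compiled pattern, not on the input being matched.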
+      Matcher m = pattern.matcher("test");
+      capturingGroups = m.groupCount();
+    } catch (PatternSyntaxException e) {
+      throw UserException
+          .validationError(e)
+          .message("Failed to parse regex: \"%s\"", formatConfig.regex)
+          .build(logger);
+    }
+  }
+
+  private void setupProjection() {
+    if (isSkipQuery()) {
+      projectNone();
+    } else if (isStarQuery()) {
+      projectAll();
+    } else {
+      projectSubset();
+    }
+  }
+
+  private void projectNone() {
+    columns = new ColumnDefn[]{new VarCharDefn("dummy", -1)};
+  }
+
+  private void openFile() {
+    InputStream in;
+    try {
+      in = dfs.open(new Path(fileWork.getPath()));
+    } catch (Exception e) {
+      throw UserException
+          .dataReadError(e)
+          .message("Failed to open open input file: %s", fileWork.getPath())
+          .addContext("User name", userName)
+          .build(logger);
+    }
+    reader = new BufferedReader(new InputStreamReader(in, Charsets.UTF_8));
+  }
+
+  private void projectAll() {
+    List<String> fields = formatConfig.getFieldNames();
+    for (int i = fields.size(); i < capturingGroups; i++) {
+      fields.add("field_" + i);
+    }
+    columns = new ColumnDefn[capturingGroups];
+
+    for (int i = 0; i < capturingGroups; i++) {
+      columns[i] = makeColumn(fields.get(i), i);
+    }
+  }
+
+  private void projectSubset() {
+    Collection<SchemaPath> project = this.getColumns();
+    assert !project.isEmpty();
+    columns = new ColumnDefn[project.size()];
+
+    List<String> fields = formatConfig.getFieldNames();
+    int colIndex = 0;
+
+    for (SchemaPath column : project) {
+      if (column.getAsNamePart().hasChild()) {
+        throw UserException
+            .validationError()
+            .message("The log format plugin supports only simple columns")
+            .addContext("Projected column", column.toString())
+            .build(logger);
+      }
+
+      String name = column.getAsNamePart().getName();
+
+      //Need this to retrieve unnamed fields
+      Pattern r = Pattern.compile("^field_(\\d+)$");
+      Matcher m = r.matcher(name);
+      int patternIndex = -1;
+
+      if (name.equals("_unmatched_rows")) {
+        //Set boolean flag to true
+        this.unmatchedRows = true;
+        this.unmatchedColumnIndex = colIndex;
+      } else if (m.find()) {
+        //if no fields are defined in the configuration, then all the fields have names of 'field_n'
+        //Therefore n is the column index
+        patternIndex = Integer.parseInt(m.group(1));
+      } else {
+        for (int i = 0; i < fields.size(); i++) {
+          if (fields.get(i).equalsIgnoreCase(name) ||
+              fields.get(i).equals("_raw") ||
+              fields.get(i).equals("_unmatched_rows")
+              ) {
+            patternIndex = i;
+
+            break;
+          }
+        }
+      }
+      columns[colIndex++] = makeColumn(name, patternIndex);
+    }
+
+  }
+
+  private ColumnDefn makeColumn(String name, int patternIndex) {
+    String typeName = null;
+    if (patternIndex <= -1 || formatConfig.schema == null) {
+      // Use VARCHAR for missing columns
+      // (instead of Drill standard of nullable int)
+      typeName = MinorType.VARCHAR.name();
+    } else if (patternIndex < formatConfig.schema.size()) {
+      typeName = formatConfig.getDataType(patternIndex);
+    }
+    if (typeName == null) {
+      // No type name. VARCHAR is a safe guess
+      typeName = MinorType.VARCHAR.name();
+    }
+    if (name.equals("_raw") || name.equals("_unmatched_rows")) {
+      return new VarCharDefn(name, patternIndex);
+    }
+
+    MinorType type = MinorType.valueOf(typeName);
+    switch (type) {
+      case VARCHAR:
+        return new VarCharDefn(name, patternIndex);
+      case INT:
+        return new IntDefn(name, patternIndex);
+      case SMALLINT:
+        return new SmallIntDefn(name, patternIndex);
+      case BIGINT:
+        return new BigIntDefn(name, patternIndex);
+      case FLOAT4:
+        return new Float4Defn(name, patternIndex);
+      case FLOAT8:
+        return new DoubleDefn(name, patternIndex);
+      case DATE:
+        return new DateDefn(name, patternIndex, formatConfig.getDateFormat(patternIndex));
+      case TIMESTAMP:
+        return new TimeStampDefn(name, patternIndex, formatConfig.getDateFormat(patternIndex));
+      case TIME:
+        return new TimeDefn(name, patternIndex, formatConfig.getDateFormat(patternIndex));
 
 Review comment:
   Per earlier comment, now that you have the field format config, just get that object, and pass it into each constructor:
   
   ```
   return new TimeDefn(patternIndex, fieldConfig);
   ```
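   
   For illustration only, here is a rough sketch of the shape that suggestion implies. `LogFieldConfig` below is a hypothetical stand-in for the plugin's actual per-field format config class (its name and accessors are assumptions), and the Drill vector wiring is left out so the snippet stands on its own:
   
   ```
   // Hedged sketch: "LogFieldConfig" is an assumed stand-in, not the plugin's real
   // config class; only the constructor shape mirrors the suggestion above.
   import java.text.SimpleDateFormat;
   
   public class ColumnDefnSketch {
   
     // Minimal per-field config carrying what each column definition needs.
     static class LogFieldConfig {
       final String name;
       final String format;   // e.g. a SimpleDateFormat pattern; may be null
   
       LogFieldConfig(String name, String format) {
         this.name = name;
         this.format = format;
       }
     }
   
     // The constructor takes the capture-group index plus the whole field
     // config, rather than separate name and format arguments.
     static class TimeDefn {
       final int patternIndex;
       final LogFieldConfig fieldConfig;
       final SimpleDateFormat df;
   
       TimeDefn(int patternIndex, LogFieldConfig fieldConfig) {
         this.patternIndex = patternIndex;
         this.fieldConfig = fieldConfig;
         this.df = new SimpleDateFormat(fieldConfig.format);
       }
     }
   
     public static void main(String[] args) {
       LogFieldConfig fieldConfig = new LogFieldConfig("event_time", "HH:mm:ss");
       TimeDefn defn = new TimeDefn(2, fieldConfig);  // mirrors the suggested call shape
       System.out.println(defn.fieldConfig.name + " -> " + defn.df.toPattern());
     }
   }
   ```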

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services
