DRILL-3243: Added CSG mods. Fixed field names.
Removed old test files
Added parse_url() and parse_query() functions
Fixed unit test

This closes #607


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/4a82bc13
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/4a82bc13
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/4a82bc13

Branch: refs/heads/master
Commit: 4a82bc137865393f6f88b31805ad7c62cc9ca20b
Parents: 46c0f2a
Author: cgivre <[email protected]>
Authored: Wed Oct 5 23:29:26 2016 -0400
Committer: Parth Chandra <[email protected]>
Committed: Tue Nov 1 10:42:22 2016 -0700

----------------------------------------------------------------------
 .../exec/expr/fn/impl/ParseQueryFunction.java   |  87 ++++
 .../exec/expr/fn/impl/ParseUrlFunction.java     | 153 ++++++
 .../exec/store/httpd/HttpdFormatPlugin.java     | 487 -------------------
 .../exec/store/httpd/HttpdLogFormatPlugin.java  |   3 +-
 .../drill/exec/store/httpd/HttpdLogRecord.java  |   2 +-
 .../drill/exec/store/httpd/HttpdParser.java     | 123 ++++-
 .../drill/exec/store/httpd/HttpdParserTest.java |  48 ++
 .../dfs/TestFormatPluginOptionExtractor.java    |   3 +
 .../drill/exec/store/httpd/HttpdParserTest.java |  48 --
 .../store/httpd/TestHttpdLogFormatPlugin.java   |  97 ----
 .../drill/exec/store/httpd/TestHttpdPlugin.java |  31 --
 11 files changed, 411 insertions(+), 671 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/4a82bc13/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ParseQueryFunction.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ParseQueryFunction.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ParseQueryFunction.java
new file mode 100644
index 0000000..7dce1fc
--- /dev/null
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ParseQueryFunction.java
@@ -0,0 +1,87 @@
+package org.apache.drill.exec.expr.fn.impl;
+
+//*
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter;
+
+import javax.inject.Inject;
+
+/* Copyright 2001-2004 The Apache Software Foundation.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+@FunctionTemplate(
+        name="parse_query",
+        scope= FunctionTemplate.FunctionScope.SIMPLE,
+        nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+
+public class ParseQueryFunction implements DrillSimpleFunc {
+
+    @Param
+    NullableVarCharHolder input;
+
+    @Output
+    BaseWriter.ComplexWriter outWriter;
+
+    @Inject
+    DrillBuf outBuffer;
+
+    public void setup() {
+    }
+
+    public void eval() {
+
+        org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter 
queryMapWriter = outWriter.rootAsMap();
+
+        String queryString = 
org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start,
 input.end, input.buffer);
+
+        if( queryString.isEmpty() || queryString.equals("null")){
+            queryString = "";
+        }
+
+        String firstLetter = queryString.substring(0, 1);
+
+        //If the first character is a &, it doesn't split properly.  This 
checks to see if the first character is an & and if so, removes it.
+        if(firstLetter.equals("&")){
+            queryString = queryString.substring(1);
+        }
+
+        String[] arguments = queryString.split("&");
+
+        for (int i = 0; i < arguments.length; i++) {
+            String[] queryParts = arguments[i].split("=");
+
+            org.apache.drill.exec.expr.holders.VarCharHolder rowHolder = new 
org.apache.drill.exec.expr.holders.VarCharHolder();
+
+            byte[] rowStringBytes = queryParts[1].getBytes();
+
+            outBuffer.reallocIfNeeded(rowStringBytes.length);
+            outBuffer.setBytes(0, rowStringBytes);
+
+            rowHolder.start = 0;
+            rowHolder.end = rowStringBytes.length;
+            rowHolder.buffer = outBuffer;
+
+            queryMapWriter.varChar(queryParts[0]).write(rowHolder);
+
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/drill/blob/4a82bc13/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ParseUrlFunction.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ParseUrlFunction.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ParseUrlFunction.java
new file mode 100644
index 0000000..fa339d4
--- /dev/null
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/ParseUrlFunction.java
@@ -0,0 +1,153 @@
+package org.apache.drill.exec.expr.fn.impl;
+
+/*
+ * Copyright 2001-2004 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter;
+
+import javax.inject.Inject;
+
+@FunctionTemplate(
+        name="parse_url",
+        scope= FunctionTemplate.FunctionScope.SIMPLE,
+        nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+
+public class ParseUrlFunction implements DrillSimpleFunc {
+
+    @Param VarCharHolder input;
+
+    @Output BaseWriter.ComplexWriter outWriter;
+
+    @Inject DrillBuf outBuffer;
+
+    public void setup() {}
+
+    public void eval() {
+
+        org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter 
urlMapWriter = outWriter.rootAsMap();
+
+        String urlString = 
org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start,
 input.end, input.buffer);
+
+        try {
+            java.net.URL aURL = new java.net.URL(urlString);
+
+            String protocol = aURL.getProtocol();
+            String authority = aURL.getAuthority();
+            String host = aURL.getHost();
+            java.lang.Integer port = aURL.getPort();
+            String path = aURL.getPath();
+            String query = aURL.getQuery();
+            String filename = aURL.getFile();
+            String ref = aURL.getRef();
+
+            org.apache.drill.exec.expr.holders.VarCharHolder rowHolder = new 
org.apache.drill.exec.expr.holders.VarCharHolder();
+
+            byte[] rowStringBytes = protocol.getBytes();
+
+            outBuffer.reallocIfNeeded(rowStringBytes.length);
+            outBuffer.setBytes(0, rowStringBytes);
+
+            rowHolder.start = 0;
+            rowHolder.end = rowStringBytes.length;
+            rowHolder.buffer = outBuffer;
+
+            urlMapWriter.varChar("protocol").write(rowHolder);
+
+
+            byte[] authRowStringBytes = authority.getBytes();
+
+            outBuffer.reallocIfNeeded(authRowStringBytes.length);
+            outBuffer.setBytes(0, authRowStringBytes);
+
+            rowHolder.start = 0;
+            rowHolder.end = authRowStringBytes.length;
+            rowHolder.buffer = outBuffer;
+
+            urlMapWriter.varChar("authority").write(rowHolder);
+
+
+            byte[] hostRowStringBytes = host.getBytes();
+
+            outBuffer.reallocIfNeeded(hostRowStringBytes.length);
+            outBuffer.setBytes(0, hostRowStringBytes);
+
+            rowHolder.start = 0;
+            rowHolder.end = hostRowStringBytes.length;
+            rowHolder.buffer = outBuffer;
+
+            urlMapWriter.varChar("host").write(rowHolder);
+
+
+            byte[] pathRowStringBytes = path.getBytes();
+
+            outBuffer.reallocIfNeeded(pathRowStringBytes.length);
+            outBuffer.setBytes(0, pathRowStringBytes);
+
+            rowHolder.start = 0;
+            rowHolder.end = pathRowStringBytes.length;
+            rowHolder.buffer = outBuffer;
+
+            urlMapWriter.varChar("path").write(rowHolder);
+
+
+            byte[] queryRowStringBytes = query.getBytes();
+
+            outBuffer.reallocIfNeeded(queryRowStringBytes.length);
+            outBuffer.setBytes(0, queryRowStringBytes);
+
+            rowHolder.start = 0;
+            rowHolder.end = queryRowStringBytes.length;
+            rowHolder.buffer = outBuffer;
+
+            urlMapWriter.varChar("query").write(rowHolder);
+
+
+            byte[] filenameRowStringBytes = filename.getBytes();
+
+            outBuffer.reallocIfNeeded(filenameRowStringBytes.length);
+            outBuffer.setBytes(0, filenameRowStringBytes);
+
+            rowHolder.start = 0;
+            rowHolder.end = filenameRowStringBytes.length;
+            rowHolder.buffer = outBuffer;
+
+            urlMapWriter.varChar("filename").write(rowHolder);
+
+
+            byte[] refRowStringBytes = ref.getBytes();
+
+            outBuffer.reallocIfNeeded(refRowStringBytes.length);
+            outBuffer.setBytes(0, refRowStringBytes);
+
+            rowHolder.start = 0;
+            rowHolder.end = refRowStringBytes.length;
+            rowHolder.buffer = outBuffer;
+
+            urlMapWriter.varChar("ref").write(rowHolder);
+
+            org.apache.drill.exec.expr.holders.IntHolder intHolder = new 
org.apache.drill.exec.expr.holders.IntHolder();
+            intHolder.value = port;
+            urlMapWriter.integer("port").write(intHolder);
+        }
+        catch (Exception e ) {}
+    }
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/4a82bc13/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdFormatPlugin.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdFormatPlugin.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdFormatPlugin.java
deleted file mode 100644
index 7b8dc0e..0000000
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdFormatPlugin.java
+++ /dev/null
@@ -1,487 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.store.httpd;
-
-import io.netty.buffer.DrillBuf;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-import nl.basjes.parse.core.Parser;
-import nl.basjes.parse.core.exceptions.DissectionFailure;
-import nl.basjes.parse.core.exceptions.InvalidDissectorException;
-import nl.basjes.parse.core.exceptions.MissingDissectorsException;
-import nl.basjes.parse.httpdlog.ApacheHttpdLogFormatDissector;
-import nl.basjes.parse.httpdlog.dissectors.HttpFirstLineDissector;
-
-import org.apache.drill.common.exceptions.ExecutionSetupException;
-import org.apache.drill.common.exceptions.UserException;
-import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.logical.FormatPluginConfig;
-import org.apache.drill.common.logical.StoragePluginConfig;
-import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.exec.ops.FragmentContext;
-import org.apache.drill.exec.ops.OperatorContext;
-import org.apache.drill.exec.physical.impl.OutputMutator;
-import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.store.AbstractRecordReader;
-import org.apache.drill.exec.store.RecordWriter;
-import org.apache.drill.exec.store.dfs.DrillFileSystem;
-import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
-import org.apache.drill.exec.store.dfs.easy.EasyWriter;
-import org.apache.drill.exec.store.dfs.easy.FileWork;
-import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter;
-import org.apache.drill.exec.vector.complex.writer.BaseWriter.ComplexWriter;
-import org.apache.drill.exec.vector.complex.writer.BigIntWriter;
-import org.apache.drill.exec.vector.complex.writer.Float8Writer;
-import org.apache.drill.exec.vector.complex.writer.VarCharWriter;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.FileSplit;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.LineRecordReader;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.TextInputFormat;
-
-import com.fasterxml.jackson.annotation.JsonTypeName;
-import com.google.common.base.Charsets;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
-public class HttpdFormatPlugin extends 
EasyFormatPlugin<HttpdFormatPlugin.HttpdLogFormatConfig> {
-
-  private static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(HttpdFormatPlugin.class);
-
-  private static final String DEFAULT_EXTENSION = "httpd";
-
-  public HttpdFormatPlugin(
-      String name,
-      DrillbitContext context,
-      Configuration fsConf,
-      StoragePluginConfig storageConfig,
-      HttpdLogFormatConfig formatConfig) {
-    super(name, context, fsConf, storageConfig, formatConfig, true, false, 
true, true,
-        Lists.newArrayList(DEFAULT_EXTENSION), DEFAULT_EXTENSION);
-  }
-
-  @JsonTypeName("httpd")
-  public static class HttpdLogFormatConfig implements FormatPluginConfig {
-    public String format;
-  }
-
-  private class RecordReader extends AbstractRecordReader {
-
-    private final DrillFileSystem fs;
-    private final FileWork work;
-    private final FragmentContext fragmentContext;
-
-    private ComplexWriter writer;
-    private Parser<ComplexWriterFacade> parser;
-    private LineRecordReader lineReader;
-    private LongWritable lineNumber;
-    private ComplexWriterFacade record;
-    private DrillBuf managedBuffer;
-
-    public RecordReader(FragmentContext context, DrillFileSystem fs, FileWork 
work) {
-      this.fs = fs;
-      this.work = work;
-      fragmentContext = context;
-      managedBuffer = context.getManagedBuffer();
-    }
-
-    @Override
-    public void setup(OperatorContext context, OutputMutator output) throws 
ExecutionSetupException {
-
-      try {
-
-        parser = new PartiallyDissectedParser<ComplexWriterFacade>(
-            ComplexWriterFacade.class,
-            HttpdFormatPlugin.this.getConfig().format);
-        writer = new VectorContainerWriter(output);
-        record = new ComplexWriterFacade(writer);
-        record.addAsParseTarget(parser);
-
-        final Path path = fs.makeQualified(new Path(work.getPath()));
-        FileSplit split = new FileSplit(path, work.getStart(), 
work.getLength(), new String[] { "" });
-        TextInputFormat inputFormat = new TextInputFormat();
-        JobConf job = new JobConf(fs.getConf());
-        job.setInt("io.file.buffer.size", fragmentContext.getConfig()
-            .getInt(ExecConstants.TEXT_LINE_READER_BUFFER_SIZE));
-        job.setInputFormat(inputFormat.getClass());
-        lineReader = (LineRecordReader) inputFormat.getRecordReader(split, 
job, Reporter.NULL);
-        lineNumber = lineReader.createKey();
-      } catch (Exception e) {
-        throw handleAndGenerate("Failure in creating record reader", e);
-      }
-
-
-    }
-
-    private DrillBuf buf(int size) {
-      if (managedBuffer.capacity() < size) {
-        managedBuffer = managedBuffer.reallocIfNeeded(size);
-      }
-      return managedBuffer;
-    }
-
-    protected RuntimeException handleAndGenerate(String s, Exception e) {
-      throw UserException.dataReadError(e)
-          .message(s + "\n%s", e.getMessage())
-          .addContext("Path", work.getPath())
-          .addContext("Split Start", work.getStart())
-          .addContext("Split Length", work.getLength())
-          .addContext("Local Line Number", lineNumber.get())
-          .build(logger);
-    }
-
-    @Override
-    public int next() {
-      try {
-        final Text currentText = lineReader.createValue();
-
-        writer.allocate();
-        writer.reset();
-        int recordCount = 0;
-
-        for (; recordCount < 4095 && lineReader.next(lineNumber, currentText); 
recordCount++) {
-          writer.setPosition(recordCount);
-          parser.parse(record, currentText.toString());
-        }
-
-        writer.setValueCount(recordCount);
-        return recordCount;
-      } catch (DissectionFailure | InvalidDissectorException | 
MissingDissectorsException | IOException e) {
-        throw handleAndGenerate("Failure while reading httpd log record.", e);
-      }
-    }
-
-    @Override
-    public void cleanup() {
-
-      try {
-        if (lineReader != null) {
-          lineReader.close();
-        }
-      } catch (IOException e) {
-        logger.warn("Failure while closing Httpd reader.", e);
-      }
-    }
-
-    /**
-     * Maps Httpd Log Libraries calls to Drills ComplexWriter interface.
-     */
-    public class ComplexWriterFacade {
-      private final ComplexWriter writer;
-      private final Map<String, VarCharWriter> stringWriters = 
Maps.newHashMap();
-      private final Map<String, BigIntWriter> longWriters = Maps.newHashMap();
-      private final Map<String, Float8Writer> doubleWriters = 
Maps.newHashMap();
-
-      private ComplexWriterFacade(ComplexWriter writer) {
-        this.writer = writer;
-      }
-
-      @SuppressWarnings("unused")
-      public void set(final String name, final String value) {
-        if (value != null) {
-          final byte[] stringBytes = value.getBytes(Charsets.UTF_8);
-          final DrillBuf stringBuffer = buf(stringBytes.length);
-          stringBuffer.clear();
-          stringBuffer.writeBytes(stringBytes);
-          final VarCharWriter writer = stringWriters.get(name);
-          if (writer != null) {
-            writer.writeVarChar(0, stringBytes.length, stringBuffer);
-          } else {
-            logger.warn("Dropped string.  Name: {}, Value: {}", name, value);
-          }
-        }
-      }
-
-      @SuppressWarnings("unused")
-      public void set(String name, Long value) {
-        if (value != null) {
-          longWriters.get(name).writeBigInt(value);
-        }
-      }
-
-      @SuppressWarnings("unused")
-      public void set(String name, Double value) {
-        if (value != null) {
-          doubleWriters.get(name).writeFloat8(value);
-        }
-      }
-
-      private void add(Parser<ComplexWriterFacade> parser, String path, 
VarCharWriter writer)
-          throws NoSuchMethodException,
-          SecurityException {
-        stringWriters.put(path, writer);
-        parser.addParseTarget(
-            ComplexWriterFacade.class.getMethod("set", new Class[] { 
String.class, String.class }),
-            path);
-      }
-
-      @SuppressWarnings("unused")
-      private void add(Parser<ComplexWriterFacade> parser, String path, 
Float8Writer writer)
-          throws NoSuchMethodException,
-          SecurityException {
-        doubleWriters.put(path, writer);
-        parser.addParseTarget(
-            ComplexWriterFacade.class.getMethod("set", new Class[] { 
String.class, Double.class }),
-            path);
-      }
-
-      private void add(Parser<ComplexWriterFacade> parser, String path, 
BigIntWriter writer)
-          throws NoSuchMethodException,
-          SecurityException {
-        longWriters.put(path, writer);
-        parser.addParseTarget(
-            ComplexWriterFacade.class.getMethod("set", new Class[] { 
String.class, Long.class }),
-            path);
-      }
-
-      public void addAsParseTarget(Parser<ComplexWriterFacade> parser) {
-        try {
-
-          for (final String path : parser.getPossiblePaths()) {
-            switch (path) {
-            case "IP:connection.client.ip":
-              add(parser, path, 
writer.rootAsMap().map("client").varChar("ip"));
-              break;
-            case "IP:connection.client.peerip":
-              add(parser, path, 
writer.rootAsMap().map("client").varChar("peer_ip"));
-              break;
-            case "IP:connection.server.ip":
-              add(parser, path, 
writer.rootAsMap().map("server").varChar("ip"));
-              break;
-            case "BYTES:response.body.bytes":
-              add(parser, path, 
writer.rootAsMap().map("response").bigInt("bytes"));
-              break;
-            case "BYTES:response.body.bytesclf":
-              add(parser, path, 
writer.rootAsMap().map("response").bigInt("bytes"));
-              break;
-            case "HTTP.COOKIE:request.cookies.":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("cookies"));
-              break;
-            case "MICROSECONDS:server.process.time":
-              add(parser, path, 
writer.rootAsMap().map("response").bigInt("process_time"));
-              break;
-            case "FILENAME:server.filename":
-              add(parser, path, 
writer.rootAsMap().map("response").varChar("filename"));
-              break;
-            case "IP:connection.client.host":
-              add(parser, path, 
writer.rootAsMap().map("client").varChar("host"));
-              break;
-            case "PROTOCOL:request.protocol":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("protocol"));
-              break;
-            case "HTTP.HEADER:request.header.":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("header"));
-              break;
-            case "NUMBER:connection.keepalivecount":
-              add(parser, path, 
writer.rootAsMap().map("client").bigInt("keepalivecount"));
-              break;
-            case "NUMBER:connection.client.logname":
-              add(parser, path, 
writer.rootAsMap().map("request").bigInt("logname"));
-              break;
-            case "STRING:request.errorlogid":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("errorlogid"));
-              break;
-            case "HTTP.METHOD:request.method":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("method"));
-              break;
-            case "PORT:request.server.port.canonical":
-              add(parser, path, 
writer.rootAsMap().map("server").bigInt("canonical_port"));
-              break;
-            case "PORT:connection.server.port.canonical":
-              add(parser, path, 
writer.rootAsMap().map("server").bigInt("canonical_port"));
-              break;
-            case "PORT:connection.client.port":
-              add(parser, path, 
writer.rootAsMap().map("client").bigInt("port"));
-              break;
-            case "NUBMER:connection.server.child.processid":
-              add(parser, path, 
writer.rootAsMap().map("server").bigInt("process_id"));
-              break;
-            case "NUMBER:connection.server.child.threadid":
-              add(parser, path, 
writer.rootAsMap().map("server").bigInt("thread_id"));
-              break;
-            case "STRING:connection.server.child.hexthreadid":
-              add(parser, path, 
writer.rootAsMap().map("connection").varChar("hex_thread_id"));
-              break;
-            case "HTTP.QUERYSTRING:request.querystring":
-              add(parser, path, writer.rootAsMap().map("").varChar(""));
-              break;
-            case "HTTP.FIRSTLINE:request.firstline":
-              add(parser, path, writer.rootAsMap().map("").varChar(""));
-              break;
-            case "STRING:request.handler":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("handler"));
-              break;
-            case "STRING:request.status.original":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("status_original"));
-              break;
-            case "STRING:request.status.last":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("status_last"));
-              break;
-            case "TIME.STAMP:request.receive.time":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("timestamp"));
-              break;
-            case "TIME.EPOCH:request.receive.time.begin.msec":
-              add(parser, path, 
writer.rootAsMap().map("request").bigInt("begin_msec"));
-              break;
-            case "TIME.EPOCH:request.receive.time.end.msec":
-              add(parser, path, 
writer.rootAsMap().map("request").bigInt("end_msec"));
-              break;
-            case "TIME.EPOCH.USEC:request.receive.time.begin.usec":
-              add(parser, path, 
writer.rootAsMap().map("request").bigInt("begin_usec"));
-              break;
-            case "TIME.EPOCH.USEC:request.receive.time.end.usec":
-              add(parser, path, 
writer.rootAsMap().map("request").bigInt("end_usec"));
-              break;
-            case "TIME.EPOCH:request.receive.time.begin.msec_frac":
-              add(parser, path, 
writer.rootAsMap().map("request").bigInt("begin_msec_frac"));
-              break;
-            case "TIME.EPOCH:request.receive.time.end.msec_frac":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("end_msec_frac"));
-              break;
-            case "TIME.EPOCH.USEC_FRAC:request.receive.time.begin.usec_frac":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("begin_usec_frac"));
-              break;
-            case "TIME.EPOCH.USEC_FRAC:request.receive.time.end.usec_frac":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("end_usec_frac"));
-              break;
-            case "SECONDS:response.server.processing.time":
-              add(parser, path, 
writer.rootAsMap().map("response").varChar("processing_time"));
-              break;
-            case "STRING:connection.client.user":
-              add(parser, path, 
writer.rootAsMap().map("client").varChar("user"));
-              break;
-            case "URI:request.urlpath":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("url"));
-              break;
-            case "STRING:connection.server.name.canonical":
-              add(parser, path, 
writer.rootAsMap().map("server").varChar("canonical_name"));
-              break;
-            case "STRING:connection.server.name":
-              add(parser, path, 
writer.rootAsMap().map("server").varChar("name"));
-              break;
-            case "HTTP.CONNECTSTATUS:response.connection.status":
-              add(parser, path, 
writer.rootAsMap().map("response").varChar("connection_status"));
-              break;
-            case "BYTES:request.bytes":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("bytes"));
-              break;
-            case "BYTES:response.bytes":
-              add(parser, path, 
writer.rootAsMap().map("response").bigInt("bytes"));
-              break;
-            case "HTTP.COOKIES:request.cookies":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("cookies"));
-              break;
-            case "HTTP.SETCOOKIES:response.cookies":
-              add(parser, path, 
writer.rootAsMap().map("response").varChar("cookies"));
-              break;
-            case "HTTP.USERAGENT:request.user-agent":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("useragent"));
-              break;
-            case "HTTP.URI:request.referer":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("referer"));
-              break;
-            case "HTTP.METHOD:method":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("method"));
-              break;
-            case "HTTP.URI:uri":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("uri"));
-              break;
-            case "HTTP.PROTOCOL:protocol":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("protocol"));
-              break;
-            case "HTTP.PROTOCOL.VERSION:protocol.version":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("protocol_version"));
-              break;
-            case "HTTP.METHOD:request.firstline.method":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("method"));
-              break;
-            case "HTTP.URI:request.firstline.uri":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("uri"));
-              break;
-            case "HTTP.PROTOCOL:request.firstline.protocol":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("protocol"));
-              break;
-            case "HTTP.PROTOCOL.VERSION:request.firstline.protocol.version":
-              add(parser, path, 
writer.rootAsMap().map("request").varChar("protocol_version"));
-              break;
-            default:
-
-              // if we don't know what to do, just write the raw value.
-              parser.addParseTarget(
-                  ComplexWriterFacade.class.getMethod("set", new Class[] { 
String.class, String.class }),
-                  path);
-              final String noPeriodPath = path.replace(".", "_");
-              stringWriters.put(path, 
writer.rootAsMap().varChar(noPeriodPath));
-              break;
-
-            }
-          }
-
-
-        } catch (MissingDissectorsException | SecurityException | 
NoSuchMethodException | InvalidDissectorException e) {
-          throw handleAndGenerate("Failure while setting up log mappings.", e);
-        }
-      }
-    }
-  }
-
-  @Override
-  public boolean supportsPushDown() {
-    return true;
-  }
-
-
-  @Override
-  public RecordReader getRecordReader(FragmentContext context, DrillFileSystem 
dfs,
-      FileWork fileWork, List<SchemaPath> columns) throws 
ExecutionSetupException {
-    return new RecordReader(context, dfs, fileWork);
-  }
-
-  @Override
-  public RecordWriter getRecordWriter(FragmentContext context, EasyWriter 
writer) throws IOException {
-    throw new UnsupportedOperationException("Drill doesn't currently support 
writing to HTTPD logs.");
-  }
-
-  @Override
-  public int getReaderOperatorType() {
-    return -1;
-  }
-
-  @Override
-  public int getWriterOperatorType() {
-    return -1;
-  }
-
-  private class PartiallyDissectedParser<RECORD> extends Parser<RECORD> {
-    public PartiallyDissectedParser(Class<RECORD> clazz, final String 
logformat) {
-      super(clazz);
-
-      addDissector(new ApacheHttpdLogFormatDissector(logformat));
-      addDissector(new HttpFirstLineDissector());
-      setRootType(ApacheHttpdLogFormatDissector.INPUT_TYPE);
-    }
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/drill/blob/4a82bc13/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogFormatPlugin.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogFormatPlugin.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogFormatPlugin.java
index 7a83d45..a6dc7d9 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogFormatPlugin.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogFormatPlugin.java
@@ -1,3 +1,4 @@
+
 /**
  * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license agreements. See the NOTICE
  * file distributed with this work for additional information regarding 
copyright ownership. The ASF licenses this file
@@ -243,4 +244,4 @@ public class HttpdLogFormatPlugin extends 
EasyFormatPlugin<HttpdLogFormatPlugin.
   public int getWriterOperatorType() {
     return -1;
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/drill/blob/4a82bc13/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogRecord.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogRecord.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogRecord.java
index 03f70c1..2775285 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogRecord.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogRecord.java
@@ -296,4 +296,4 @@ public class HttpdLogRecord {
       strings.put(parserFieldName, mapWriter.varChar(drillFieldName));
     }
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/drill/blob/4a82bc13/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdParser.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdParser.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdParser.java
index 114a7f4..a896638 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdParser.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdParser.java
@@ -18,9 +18,6 @@ package org.apache.drill.exec.store.httpd;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
 import io.netty.buffer.DrillBuf;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Map;
 import nl.basjes.parse.core.Casts;
 import nl.basjes.parse.core.Parser;
 import nl.basjes.parse.core.exceptions.DissectionFailure;
@@ -31,6 +28,11 @@ import 
org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 public class HttpdParser {
 
   private static final Logger LOG = LoggerFactory.getLogger(HttpdParser.class);
@@ -41,6 +43,105 @@ public class HttpdParser {
   private final Parser<HttpdLogRecord> parser;
   private final HttpdLogRecord record;
 
+    public static final HashMap<String, String> LOGFIELDS = new 
HashMap<String, String>();
+    static
+    {
+        LOGFIELDS.put("request_receive_time_weekyear__utc", 
"TIME_YEAR:request_receive_time_weekyear__utc");
+        LOGFIELDS.put("request_referer_ref", "HTTP_REF:request_referer_ref");
+        LOGFIELDS.put("request_referer_protocol", 
"HTTP_PROTOCOL:request_referer_protocol");
+        LOGFIELDS.put("request_receive_time_timezone", 
"TIME_ZONE:request_receive_time_timezone");
+        LOGFIELDS.put("connection_client_host", "IP:connection_client_host");
+        LOGFIELDS.put("connection_client_ip", "IP:connection_client_ip");
+        LOGFIELDS.put("connection_client_peerip", 
"IP:connection_client_peerip");
+        LOGFIELDS.put("connection_server_ip", "IP:connection_server_ip");
+        LOGFIELDS.put("request_receive_time_day", 
"TIME_DAY:request_receive_time_day");
+        LOGFIELDS.put("request_receive_time_minute__utc", 
"TIME_MINUTE:request_receive_time_minute__utc");
+        LOGFIELDS.put("request_referer_query_$", 
"STRING:request_referer_query_$");
+        LOGFIELDS.put("request_receive_time_millisecond__utc", 
"TIME_MILLISECOND:request_receive_time_millisecond__utc");
+        LOGFIELDS.put("request_firstline_uri_port", 
"HTTP_PORT:request_firstline_uri_port");
+        LOGFIELDS.put("request_referer_userinfo", 
"HTTP_USERINFO:request_referer_userinfo");
+        LOGFIELDS.put("request_receive_time_second__utc", 
"TIME_SECOND:request_receive_time_second__utc");
+        LOGFIELDS.put("request_firstline_uri_protocol", 
"HTTP_PROTOCOL:request_firstline_uri_protocol");
+        LOGFIELDS.put("request_receive_time_month", 
"TIME_MONTH:request_receive_time_month");
+        LOGFIELDS.put("request_firstline_uri_query", 
"HTTP_QUERYSTRING:request_firstline_uri_query");
+        LOGFIELDS.put("request_firstline_uri_path", 
"HTTP_PATH:request_firstline_uri_path");
+        LOGFIELDS.put("request_receive_time_hour__utc", 
"TIME_HOUR:request_receive_time_hour__utc");
+        LOGFIELDS.put("request_receive_time_monthname", 
"TIME_MONTHNAME:request_receive_time_monthname");
+        LOGFIELDS.put("request_receive_time_year__utc", 
"TIME_YEAR:request_receive_time_year__utc");
+        LOGFIELDS.put("request_receive_time_second", 
"TIME_SECOND:request_receive_time_second");
+        LOGFIELDS.put("request_referer", "HTTP_URI:request_referer");
+        LOGFIELDS.put("request_receive_time_monthname__utc", 
"TIME_MONTHNAME:request_receive_time_monthname__utc");
+        LOGFIELDS.put("request_referer_path", 
"HTTP_PATH:request_referer_path");
+        LOGFIELDS.put("request_receive_time_weekyear", 
"TIME_YEAR:request_receive_time_weekyear");
+        LOGFIELDS.put("request_firstline_protocol", 
"HTTP_PROTOCOL:request_firstline_protocol");
+        LOGFIELDS.put("request_referer_port", 
"HTTP_PORT:request_referer_port");
+        LOGFIELDS.put("request_receive_time_minute", 
"TIME_MINUTE:request_receive_time_minute");
+        LOGFIELDS.put("request_status_last", "STRING:request_status_last");
+        LOGFIELDS.put("request_receive_time_hour", 
"TIME_HOUR:request_receive_time_hour");
+        LOGFIELDS.put("request_firstline_protocol_version", 
"HTTP_PROTOCOL_VERSION:request_firstline_protocol_version");
+        LOGFIELDS.put("request_receive_time", 
"TIME_STAMP:request_receive_time");
+        LOGFIELDS.put("request_firstline_method", 
"HTTP_METHOD:request_firstline_method");
+        LOGFIELDS.put("request_receive_time_epoch", 
"TIME_EPOCH:request_receive_time_epoch");
+        LOGFIELDS.put("request_receive_time_weekofweekyear", 
"TIME_WEEK:request_receive_time_weekofweekyear");
+        LOGFIELDS.put("request_firstline_uri_host", 
"HTTP_HOST:request_firstline_uri_host");
+        LOGFIELDS.put("request_referer_query", 
"HTTP_QUERYSTRING:request_referer_query");
+        LOGFIELDS.put("request_firstline_uri_userinfo", 
"HTTP_USERINFO:request_firstline_uri_userinfo");
+        LOGFIELDS.put("response_body_bytes", "BYTES:response_body_bytes");
+        LOGFIELDS.put("response_body_bytesclf", 
"BYTES:response_body_bytesclf");
+        LOGFIELDS.put("request_referer_host", 
"HTTP_HOST:request_referer_host");
+        LOGFIELDS.put("request_receive_time_weekofweekyear__utc", 
"TIME_WEEK:request_receive_time_weekofweekyear__utc");
+        LOGFIELDS.put("request_firstline_uri", 
"HTTP_URI:request_firstline_uri");
+        LOGFIELDS.put("request_firstline_uri_ref", 
"HTTP_REF:request_firstline_uri_ref");
+        LOGFIELDS.put("request_receive_time_year", 
"TIME_YEAR:request_receive_time_year");
+        LOGFIELDS.put("request_firstline", "HTTP_FIRSTLINE:request_firstline");
+        LOGFIELDS.put("request_user-agent", 
"HTTP_USERAGENT:request_user-agent");
+        LOGFIELDS.put("request_cookies", "HTTP_COOKIE:request_cookies");
+        LOGFIELDS.put("server_process_time", 
"MICROSECONDS:server_process_time");
+        LOGFIELDS.put("request_cookies_$", "HTTP_COOKIE:request_cookies_$");
+        LOGFIELDS.put("server_environment_$", "VARIABLE:server_environment_$");
+        LOGFIELDS.put("server_filename", "FILENAME:server_filename");
+        LOGFIELDS.put("request_protocol", "PROTOCOL:request_protocol");
+        LOGFIELDS.put("request_header_", "HTTP_HEADER:request_header_");
+        LOGFIELDS.put("connection_keepalivecount", 
"NUMBER:connection_keepalivecount");
+        LOGFIELDS.put("connection_client_logname", 
"NUMBER:connection_client_logname");
+        LOGFIELDS.put("request_errorlogid", "STRING:request_errorlogid");
+        LOGFIELDS.put("request_method", "HTTP_METHOD:request_method");
+        LOGFIELDS.put("server_module_note_$", "STRING:server_module_note_$");
+        LOGFIELDS.put("response_header_$", "HTTP_HEADER:response_header_$");
+        LOGFIELDS.put("request_server_port_canonical", 
"PORT:request_server_port_canonical");
+        LOGFIELDS.put("connection_server_port_canonical", 
"PORT:connection_server_port_canonical");
+        LOGFIELDS.put("connection_server_port", "PORT:connection_server_port");
+        LOGFIELDS.put("connection_client_port", "PORT:connection_client_port");
+        LOGFIELDS.put("connection_server_child_processid", 
"NUMBER:connection_server_child_processid");
+        LOGFIELDS.put("connection_server_child_threadid", 
"NUMBER:connection_server_child_threadid");
+        LOGFIELDS.put("connection_server_child_hexthreadid", 
"NUMBER:connection_server_child_hexthreadid");
+        LOGFIELDS.put("request_querystring", 
"HTTP_QUERYSTRING:request_querystring");
+        LOGFIELDS.put("request_handler", "STRING:request_handler");
+        LOGFIELDS.put("request_status_original", 
"STRING:request_status_original");
+        LOGFIELDS.put("request_status_last", "STRING:request_status_last");
+        LOGFIELDS.put("request_receive_time_begin_msec", 
"TIME_EPOCH:request_receive_time_begin_msec");
+        LOGFIELDS.put("request_receive_time_end_msec", 
"TIME_EPOCH:request_receive_time_end_msec");
+        LOGFIELDS.put("request_receive_time_begin_usec", 
"TIME_EPOCH_USEC:request_receive_time_begin_usec");
+        LOGFIELDS.put("request_receive_time_begin_usec", 
"TIME_EPOCH_USEC:request_receive_time_begin_usec");
+        LOGFIELDS.put("request_receive_time_end_usec", 
"TIME_EPOCH_USEC:request_receive_time_end_usec");
+        LOGFIELDS.put("request_receive_time_begin_msec_frac", 
"TIME_EPOCH:request_receive_time_begin_msec_frac");
+        LOGFIELDS.put("request_receive_time_begin_msec_frac", 
"TIME_EPOCH:request_receive_time_begin_msec_frac");
+        LOGFIELDS.put("request_receive_time_end_msec_frac", 
"TIME_EPOCH:request_receive_time_end_msec_frac");
+        LOGFIELDS.put("request_receive_time_begin_usec_frac", 
"TIME_EPOCH_USEC_FRAC:request_receive_time_begin_usec_frac");
+        LOGFIELDS.put("request_receive_time_begin_usec_frac", 
"TIME_EPOCH_USEC_FRAC:request_receive_time_begin_usec_frac");
+        LOGFIELDS.put("request_receive_time_end_usec_frac", 
"TIME_EPOCH_USEC_FRAC:request_receive_time_end_usec_frac");
+        LOGFIELDS.put("response_server_processing_time", 
"SECONDS:response_server_processing_time");
+        LOGFIELDS.put("connection_client_user", 
"STRING:connection_client_user");
+        LOGFIELDS.put("request_urlpath", "URI:request_urlpath");
+        LOGFIELDS.put("connection_server_name_canonical", 
"STRING:connection_server_name_canonical");
+        LOGFIELDS.put("connection_server_name", 
"STRING:connection_server_name");
+        LOGFIELDS.put("response_connection_status", 
"HTTP_CONNECTSTATUS:response_connection_status");
+        LOGFIELDS.put("request_bytes", "BYTES:request_bytes");
+        LOGFIELDS.put("response_bytes", "BYTES:response_bytes");
+    }
+
+    //Map map = Collections.synchronizedMap(LOGFIELDS);
+
   public HttpdParser(final MapWriter mapWriter, final DrillBuf managedBuffer, 
final String logFormat,
       final String timestampFormat, final Map<String, String> fieldMapping)
       throws NoSuchMethodException, MissingDissectorsException, 
InvalidDissectorException {
@@ -96,7 +197,10 @@ public class HttpdParser {
    * @return
    */
   public static String parserFormattedFieldName(final String drillFieldName) {
-    return drillFieldName.replace(SAFE_WILDCARD, 
PARSER_WILDCARD).replaceAll(SAFE_SEPARATOR, ".").replaceAll("\\.\\.", "_");
+      // Fall back to the raw Drill field name when no mapping exists, avoiding an NPE.
+      String tempFieldName = LOGFIELDS.get(drillFieldName);
+      if (tempFieldName == null) { tempFieldName = drillFieldName; }
+      return tempFieldName.replace(SAFE_WILDCARD, 
PARSER_WILDCARD).replaceAll(SAFE_SEPARATOR, ".").replaceAll("\\.\\.", "_");
   }
 
   /**
@@ -108,7 +212,14 @@ public class HttpdParser {
    * @return
    */
   public static String drillFormattedFieldName(final String parserFieldName) {
-    return parserFieldName.replaceAll("_", "__").replace(PARSER_WILDCARD, 
SAFE_WILDCARD).replaceAll("\\.", SAFE_SEPARATOR);
+
+      if (parserFieldName.contains(":") ) {
+        String[] fieldPart= parserFieldName.split(":");
+        return fieldPart[1].replaceAll("_", "__").replace(PARSER_WILDCARD, 
SAFE_WILDCARD).replaceAll("\\.", SAFE_SEPARATOR);
+        }
+    else{
+      return parserFieldName.replaceAll("_", "__").replace(PARSER_WILDCARD, 
SAFE_WILDCARD).replaceAll("\\.", SAFE_SEPARATOR);
+    }
   }
 
   private void setupParser(final MapWriter mapWriter, final String logFormat, 
final Map<String, String> fieldMapping)
@@ -168,4 +279,4 @@ public class HttpdParser {
       record.addField(parser, mapWriter, casts, entry.getValue(), 
entry.getKey());
     }
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/drill/blob/4a82bc13/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdParserTest.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdParserTest.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdParserTest.java
new file mode 100644
index 0000000..b82b1ee
--- /dev/null
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/httpd/HttpdParserTest.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2015 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.httpd;
+
+import io.netty.buffer.DrillBuf;
+import java.util.Map;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class HttpdParserTest {
+
+  private static final Logger LOG = 
LoggerFactory.getLogger(HttpdParserTest.class);
+
+  private void runTest(String logFormat, String logLine) throws Exception {
+    MapWriter mapWriter = null;
+    DrillBuf managedBuffer = null;
+    Map<String, String> configuredFields = null;
+    HttpdParser parser = new HttpdParser(mapWriter, managedBuffer, logFormat, 
null, configuredFields);
+    parser.parse(logLine);
+  }
+
+//  @Test
+  public void testFirstPattern() throws Exception {
+    LOG.info("testFirstPattern");
+//    final String format = "common";
+//    final String format = "%h %l %u %t \"%r\" %>s %b";
+    final String format = "%h %t \"%r\" %>s %b \"%{Referer}i\"";
+    final String line = "127.0.0.1 [31/Dec/2012:23:49:41 +0100] "
+        + "\"GET /foo HTTP/1.1\" 200 "
+        + "1213 \"http://localhost/index.php?mies=wim\"";
+    runTest(format, line);
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/drill/blob/4a82bc13/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFormatPluginOptionExtractor.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFormatPluginOptionExtractor.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFormatPluginOptionExtractor.java
index c341295..2cbc09a 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFormatPluginOptionExtractor.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFormatPluginOptionExtractor.java
@@ -61,6 +61,9 @@ public class TestFormatPluginOptionExtractor {
         case "avro":
           assertEquals(d.typeName, "(type: String)", d.presentParams());
           break;
+        case "httpd":
+          assertEquals("(type: String, logFormat: String, timestampFormat: 
String)", d.presentParams());
+          break;
         default:
           fail("add validation for format plugin type " + d.typeName);
       }

http://git-wip-us.apache.org/repos/asf/drill/blob/4a82bc13/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/HttpdParserTest.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/HttpdParserTest.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/HttpdParserTest.java
deleted file mode 100644
index 961d9a6..0000000
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/HttpdParserTest.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2015 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.store.httpd;
-
-import io.netty.buffer.DrillBuf;
-import java.util.Map;
-import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class HttpdParserTest {
-
-  private static final Logger LOG = 
LoggerFactory.getLogger(HttpdParserTest.class);
-
-  private void runTest(String logFormat, String logLine) throws Exception {
-    MapWriter mapWriter = null;
-    DrillBuf managedBuffer = null;
-    Map<String, String> configuredFields = null;
-    HttpdParser parser = new HttpdParser(mapWriter, managedBuffer, logFormat, 
null, configuredFields);
-    parser.parse(logLine);
-  }
-
-//  @Test
-  public void testFirstPattern() throws Exception {
-    LOG.info("testFirstPattern");
-//    final String format = "common";
-//    final String format = "%h %l %u %t \"%r\" %>s %b";
-    final String format = "%h %t \"%r\" %>s %b \"%{Referer}i\"";
-    final String line = "127.0.0.1 [31/Dec/2012:23:49:41 +0100] "
-        + "\"GET /foo HTTP/1.1\" 200 "
-        + "1213 \"http://localhost/index.php?mies=wim\"";;
-    runTest(format, line);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/drill/blob/4a82bc13/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHttpdLogFormatPlugin.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHttpdLogFormatPlugin.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHttpdLogFormatPlugin.java
deleted file mode 100644
index 6c2eaf8..0000000
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHttpdLogFormatPlugin.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license agreements. See the NOTICE
- * file distributed with this work for additional information regarding 
copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the "License"); you may not 
use this file except in compliance with the
- * License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software 
distributed under the License is distributed on
- * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 
express or implied. See the License for the
- * specific language governing permissions and limitations under the License.
- */
-package org.apache.drill.exec.store.httpd;
-
-import java.util.List;
-import org.apache.drill.BaseTestQuery;
-import org.apache.drill.exec.rpc.user.QueryDataBatch;
-import static org.junit.Assert.*;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TestHttpdLogFormatPlugin extends BaseTestQuery {
-
-  private static final Logger LOG = 
LoggerFactory.getLogger(TestHttpdLogFormatPlugin.class);
-
-  /**
-   * This test covers the test bootstrap-storage-plugins.json section of httpd.
-   *
-   * Indirectly this validates the HttpdLogFormatPlugin.HttpdLogFormatConfig 
deserializing properly.
-   *
-   * @throws Exception
-   */
-  @Test
-  public void testDfsTestBootstrap_star() throws Exception {
-    test("select * from 
dfs_test.`${WORKING_PATH}/src/test/resources/store/httpd/dfs-test-bootstrap-test.httpd`");
-  }
-
-  /**
-   * This test covers the test bootstrap-storage-plugins.json section of httpd.
-   *
-   * Indirectly this validates the HttpdLogFormatPlugin.HttpdLogFormatConfig 
deserializing properly.
-   *
-   * @throws Exception
-   */
-  @Test
-  public void testDfsTestBootstrap_notstar() throws Exception {
-    test("select `TIME_STAMP:request_receive_time`, 
`HTTP_METHOD:request_firstline_method`, `STRING:request_status_last`, 
`BYTES:response_body_bytesclf` \n"
-        + "from 
dfs_test.`${WORKING_PATH}/src/test/resources/store/httpd/dfs-test-bootstrap-test.httpd`");
-  }
-
-  /**
-   * This test covers the main bootstrap-storage-plugins.json section of httpd.
-   *
-   * @throws Exception
-   */
-  @Test
-  public void testDfsBootstrap_star() throws Exception {
-    test("select * from 
dfs.`${WORKING_PATH}/src/test/resources/store/httpd/dfs-bootstrap.httpd`");
-  }
-
-  /**
-   * This test covers the main bootstrap-storage-plugins.json section of httpd.
-   *
-   * @throws Exception
-   */
-  @Test
-  public void testDfsBootstrap_wildcard() throws Exception {
-    test("select `STRING:request_referer_query_$` from 
dfs.`${WORKING_PATH}/src/test/resources/store/httpd/dfs-bootstrap.httpd`");
-  }
-
-  /**
-   * This test covers the main bootstrap-storage-plugins.json section of httpd.
-   *
-   * @throws Exception
-   */
-  @Test
-  public void testDfsBootstrap_underscore() throws Exception {
-    test("select `TIME_DAY:request_receive_time_day__utc` from 
dfs.`${WORKING_PATH}/src/test/resources/store/httpd/dfs-bootstrap.httpd`");
-  }
-
-  @Test
-  public void testGroupBy_1() throws Exception {
-    final List<QueryDataBatch> actualResults = testSqlWithResults(
-        "select `HTTP_METHOD:request_firstline_method` as http_method, 
`STRING:request_status_last` as status_code, 
sum(`BYTES:response_body_bytesclf`) as total_bytes \n"
-        + "from 
dfs_test.`${WORKING_PATH}/src/test/resources/store/httpd/dfs-test-bootstrap-test.httpd`\n"
-        + "group by `HTTP_METHOD:request_firstline_method`, 
`STRING:request_status_last`"
-    );
-
-    final TestResultSet expectedResultSet = new TestResultSet();
-    expectedResultSet.addRow("GET", "200", "46551");
-    expectedResultSet.addRow("POST", "302", "18186");
-
-    TestResultSet actualResultSet = new TestResultSet(actualResults);
-    assertTrue(expectedResultSet.equals(actualResultSet));
-  }
-}

http://git-wip-us.apache.org/repos/asf/drill/blob/4a82bc13/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHttpdPlugin.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHttpdPlugin.java
 
b/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHttpdPlugin.java
deleted file mode 100644
index ce1f685..0000000
--- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHttpdPlugin.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.store.httpd;
-
-import org.apache.drill.BaseTestQuery;
-import org.junit.Test;
-
-public class TestHttpdPlugin extends BaseTestQuery {
-  private static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(TestHttpdPlugin.class);
-
-  @Test
-  public void tryBasicQuery() throws Exception {
-    // test("select * from cp.`store/httpd/example1.httpd`");
-    test("select * from 
dfs.`${WORKING_PATH}/src/test/resources/store/httpd/example1.httpd`");
-  }
-}

Reply via email to