Repository: drill
Updated Branches:
  refs/heads/master dfe47ce04 -> 766315ea1


DRILL-4120: Allow implicit columns for Avro storage format

closes #1138


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/4652b0ba
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/4652b0ba
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/4652b0ba

Branch: refs/heads/master
Commit: 4652b0ba4f9a0708227e2b83a7097ff0517df33e
Parents: dfe47ce
Author: Volodymyr Vysotskyi <vvo...@gmail.com>
Authored: Mon Mar 5 14:25:33 2018 +0200
Committer: Ben-Zvi <bben-...@mapr.com>
Committed: Wed Mar 7 14:53:17 2018 -0800

----------------------------------------------------------------------
 .../exec/store/mapr/TableFormatMatcher.java     |   7 +-
 .../exec/store/mapr/db/MapRDBFormatMatcher.java |   5 +-
 .../planner/logical/ExtendableRelDataType.java  |  42 ++++++++
 .../drill/exec/planner/sql/SqlConverter.java    |  28 +++++
 .../types/AbstractRelDataTypeHolder.java        |  71 +++++++++++++
 .../types/ExtendableRelDataTypeHolder.java      |  82 +++++++++++++++
 .../planner/types/RelDataTypeDrillImpl.java     |   6 +-
 .../exec/planner/types/RelDataTypeHolder.java   |  39 +------
 .../apache/drill/exec/store/ColumnExplorer.java |  51 +++++++++
 .../drill/exec/store/avro/AvroDrillTable.java   |  49 ++++++---
 .../drill/exec/store/avro/AvroFormatPlugin.java |   6 +-
 .../exec/store/dfs/BasicFormatMatcher.java      |  10 +-
 .../drill/exec/store/dfs/FormatMatcher.java     |   6 +-
 .../exec/store/dfs/WorkspaceSchemaFactory.java  |   6 +-
 .../exec/store/parquet/ParquetFormatPlugin.java |   9 +-
 .../drill/exec/store/pcap/PcapFormatPlugin.java |   6 +-
 .../java/org/apache/drill/PlanningBase.java     |   3 +-
 .../drill/exec/store/avro/AvroFormatTest.java   | 104 ++++++++++++++++++-
 .../drill/exec/store/avro/AvroTestUtil.java     |  18 +++-
 19 files changed, 471 insertions(+), 77 deletions(-)
----------------------------------------------------------------------
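For illustration only (not part of this commit): after this change an Avro table exposes
the same implicit columns (filename, suffix, fqn, filepath) and partition columns (dir0,
dir1, ...) as other file formats. A minimal sketch in the style of the tests below; the
file name and baseline values are hypothetical:

  import org.apache.drill.test.BaseTestQuery;
  import org.junit.Test;

  public class AvroImplicitColumnsExample extends BaseTestQuery {
    @Test // sketch only -- assumes a dfs workspace containing example.avro
    public void queryImplicitColumns() throws Exception {
      testBuilder()
          .sqlQuery("select filename, suffix from dfs.`example.avro`")
          .unOrdered()
          .baselineColumns("filename", "suffix")
          .baselineValues("example.avro", "avro")
          .go();
    }
  }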


http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/TableFormatMatcher.java
----------------------------------------------------------------------
diff --git a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/TableFormatMatcher.java b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/TableFormatMatcher.java
index 192e57d..f663fce 100644
--- a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/TableFormatMatcher.java
+++ b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/TableFormatMatcher.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -21,6 +21,7 @@ import java.io.IOException;
 
 import org.apache.drill.exec.planner.logical.DrillTable;
 import org.apache.drill.exec.planner.logical.DynamicDrillTable;
+import org.apache.drill.exec.store.SchemaConfig;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.drill.exec.store.dfs.FileSelection;
 import org.apache.drill.exec.store.dfs.FileSystemPlugin;
@@ -45,13 +46,13 @@ public abstract class TableFormatMatcher extends FormatMatcher {
 
   public DrillTable isReadable(DrillFileSystem fs,
       FileSelection selection, FileSystemPlugin fsPlugin,
-      String storageEngineName, String userName) throws IOException {
+      String storageEngineName, SchemaConfig schemaConfig) throws IOException {
     FileStatus status = selection.getFirstPath(fs);
     if (!isFileReadable(fs, status)) {
       return null;
     }
 
-    return new DynamicDrillTable(fsPlugin, storageEngineName, userName,
+    return new DynamicDrillTable(fsPlugin, storageEngineName, schemaConfig.getUserName(),
         new FormatSelection(getFormatPlugin().getConfig(), selection));
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBFormatMatcher.java
----------------------------------------------------------------------
diff --git a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBFormatMatcher.java b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBFormatMatcher.java
index f16a2ce..4ce6b7d 100644
--- a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBFormatMatcher.java
+++ b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBFormatMatcher.java
@@ -23,6 +23,7 @@ import java.util.List;
 import com.mapr.fs.tables.TableProperties;
 import org.apache.drill.exec.planner.logical.DrillTable;
 import org.apache.drill.exec.planner.logical.DynamicDrillTable;
+import org.apache.drill.exec.store.SchemaConfig;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.drill.exec.store.dfs.FileSelection;
 import org.apache.drill.exec.store.dfs.FileSystemPlugin;
@@ -52,7 +53,7 @@ public class MapRDBFormatMatcher extends TableFormatMatcher {
   @Override
   public DrillTable isReadable(DrillFileSystem fs,
                                FileSelection selection, FileSystemPlugin fsPlugin,
-                               String storageEngineName, String userName) throws IOException {
+                               String storageEngineName, SchemaConfig schemaConfig) throws IOException {
 
     if (isFileReadable(fs, selection.getFirstPath(fs))) {
       List<String> files = selection.getFiles();
@@ -61,7 +62,7 @@ public class MapRDBFormatMatcher extends TableFormatMatcher {
       TableProperties props = getFormatPlugin().getMaprFS().getTableProperties(new Path(tableName));
 
       if (props.getAttr().getJson()) {
-        return new DynamicDrillTable(fsPlugin, storageEngineName, userName,
+        return new DynamicDrillTable(fsPlugin, storageEngineName, schemaConfig.getUserName(),
             new FormatSelection(getFormatPlugin().getConfig(), selection));
       } else {
         FormatSelection formatSelection = new FormatSelection(getFormatPlugin().getConfig(), selection);

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/ExtendableRelDataType.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/ExtendableRelDataType.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/ExtendableRelDataType.java
new file mode 100644
index 0000000..5a3526e
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/ExtendableRelDataType.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.planner.logical;
+
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.drill.exec.planner.types.ExtendableRelDataTypeHolder;
+import org.apache.drill.exec.planner.types.RelDataTypeDrillImpl;
+
+/**
+ * RelDataType for non-dynamic table structure which
+ * may be extended by adding partitions or implicit columns.
+ */
+public class ExtendableRelDataType extends RelDataTypeDrillImpl {
+
+  public ExtendableRelDataType(ExtendableRelDataTypeHolder holder, RelDataTypeFactory typeFactory) {
+    super(holder, typeFactory);
+  }
+
+  @Override
+  protected void generateTypeString(StringBuilder sb, boolean withDetail) {
+    sb.append("(ExtendableRelDataType").append(getFieldNames()).append(")");
+  }
+
+  @Override
+  public boolean isDynamicStruct() {
+    return false;
+  }
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlConverter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlConverter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlConverter.java
index af3c2bf..9821bf3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlConverter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SqlConverter.java
@@ -19,6 +19,7 @@ package org.apache.drill.exec.planner.sql;
 
 import java.util.Arrays;
 import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 
@@ -83,6 +84,7 @@ import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.rpc.user.UserSession;
 
 import com.google.common.base.Joiner;
+import org.apache.drill.exec.store.ColumnExplorer;
 
 /**
  * Class responsible for managing parsing, validation and toRel conversion for sql statements.
@@ -277,6 +279,32 @@ public class SqlConverter {
       return SqlValidatorUtil.getAlias(node, ordinal);
     }
 
+    /**
+     * Checks that the specified expression is not an implicit column and
+     * adds it to the select list, ensuring that its alias does not
+     * clash with any existing expressions on the list.
+     * <p>
+     * This method is used when {@link RelDataType#isDynamicStruct}
+     * returns false: each column from the table row type except
+     * the implicit ones is added to the specified list, aliases and fieldList.
+     * In the opposite case, when {@link RelDataType#isDynamicStruct}
+     * returns true, only the dynamic star is added to the specified
+     * list, aliases and fieldList.
+     */
+    @Override
+    protected void addToSelectList(
+        List<SqlNode> list,
+        Set<String> aliases,
+        List<Map.Entry<String, RelDataType>> fieldList,
+        SqlNode exp,
+        SqlValidatorScope scope,
+        final boolean includeSystemVars) {
+      if (!ColumnExplorer.initImplicitFileColumns(session.getOptions())
+          .containsKey(SqlValidatorUtil.getAlias(exp, -1))) {
+        super.addToSelectList(list, aliases, fieldList, exp, scope, includeSystemVars);
+      }
+    }
+
     private void changeNamesIfTableIsTemporary(SqlIdentifier tempNode) {
       List<String> temporaryTableNames = ((SqlConverter.DrillCalciteCatalogReader) getCatalogReader()).getTemporaryNames(tempNode.names);
       if (temporaryTableNames != null) {
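
A minimal sketch (not part of the commit) of the check performed in addToSelectList above:
an expression whose alias matches one of the session's implicit file columns is skipped
during star expansion. The helper class and method names here are hypothetical.

  import org.apache.drill.exec.server.options.OptionManager;
  import org.apache.drill.exec.store.ColumnExplorer;

  class ImplicitColumnCheckSketch {
    // true when the alias names an implicit file column (e.g. "filename"),
    // mirroring the containsKey() check in the override above
    static boolean isImplicitColumn(OptionManager options, String alias) {
      return ColumnExplorer.initImplicitFileColumns(options).containsKey(alias);
    }
  }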

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/AbstractRelDataTypeHolder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/AbstractRelDataTypeHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/AbstractRelDataTypeHolder.java
new file mode 100644
index 0000000..3c90ce9
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/AbstractRelDataTypeHolder.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.planner.types;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rel.type.RelDataTypeField;
+
+import java.util.List;
+
+/**
+ * Base holder class for a list of {@link RelDataTypeField}s.
+ */
+public abstract class AbstractRelDataTypeHolder {
+  protected final List<RelDataTypeField> fields;
+  protected RelDataTypeFactory typeFactory;
+
+  public AbstractRelDataTypeHolder(List<RelDataTypeField> fields) {
+    this.fields = Lists.newArrayList(fields);
+  }
+
+  /**
+   * Returns RelDataTypeField field with specified name.
+   */
+  public abstract RelDataTypeField getField(RelDataTypeFactory typeFactory, String fieldName);
+
+  /**
+   * Returns list with all RelDataTypeField fields in this holder.
+   */
+  public List<RelDataTypeField> getFieldList(RelDataTypeFactory typeFactory) {
+    return ImmutableList.copyOf(fields);
+  }
+
+  /**
+   * Returns count of RelDataTypeField fields in this holder.
+   */
+  public int getFieldCount() {
+    return fields.size();
+  }
+
+  /**
+   * Returns list with names of RelDataTypeField fields.
+   */
+  public List<String> getFieldNames() {
+    List<String> fieldNames = Lists.newArrayList();
+    for(RelDataTypeField f : fields) {
+      fieldNames.add(f.getName());
+    }
+
+    return fieldNames;
+  }
+
+  public void setRelDataTypeFactory(RelDataTypeFactory typeFactory) {
+    this.typeFactory = typeFactory;
+  }
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/ExtendableRelDataTypeHolder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/ExtendableRelDataTypeHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/ExtendableRelDataTypeHolder.java
new file mode 100644
index 0000000..6770432
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/ExtendableRelDataTypeHolder.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.planner.types;
+
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rel.type.RelDataTypeField;
+import org.apache.calcite.rel.type.RelDataTypeFieldImpl;
+import org.apache.calcite.sql.type.SqlTypeName;
+
+import java.util.List;
+
+/**
+ * Holder for a list of RelDataTypeField which may be expanded with implicit columns.
+ */
+public class ExtendableRelDataTypeHolder extends AbstractRelDataTypeHolder {
+  private final List<String> implicitColumnNames;
+
+  public ExtendableRelDataTypeHolder(List<RelDataTypeField> fields, List<String> implicitColumnNames) {
+    super(fields);
+    this.implicitColumnNames = implicitColumnNames;
+  }
+
+  /**
+   * Returns RelDataTypeField field with specified name.
+   * If field is implicit and absent in the fields list, it will be added.
+   *
+   * @param typeFactory RelDataTypeFactory which will be used
+   *                    for the creation of RelDataType for new fields.
+   * @param fieldName   name of the field.
+   * @return RelDataTypeField field
+   */
+  public RelDataTypeField getField(RelDataTypeFactory typeFactory, String fieldName) {
+
+    /* First check if this field name exists in our field list */
+    for (RelDataTypeField f : fields) {
+      if (fieldName.equalsIgnoreCase(f.getName())) {
+        return f;
+      }
+    }
+    RelDataTypeField newField = null;
+
+    if (isImplicitField(fieldName)) {
+      // This implicit field does not exist in our field list, add it
+      newField = new RelDataTypeFieldImpl(
+          fieldName,
+          fields.size(),
+          typeFactory.createTypeWithNullability(
+              typeFactory.createSqlType(SqlTypeName.VARCHAR), true));
+      fields.add(newField);
+    }
+    return newField;
+  }
+
+  /**
+   * Checks whether the specified field is implicit.
+   *
+   * @param fieldName name of the field to check
+   * @return {@code true} if the specified field is implicit
+   */
+  private boolean isImplicitField(String fieldName) {
+    for (String implicitColumn : implicitColumnNames) {
+      if (implicitColumn.equalsIgnoreCase(fieldName)) {
+        return true;
+      }
+    }
+    return false;
+  }
+}
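
A hedged usage sketch (not part of the commit) of the holder defined above: implicit
columns are materialized lazily as nullable VARCHAR fields on first lookup, while names
that are neither table columns nor implicit return null. The type factory and field
values below are illustrative only.

  import com.google.common.collect.ImmutableList;
  import org.apache.calcite.rel.type.RelDataTypeFactory;
  import org.apache.calcite.rel.type.RelDataTypeField;
  import org.apache.calcite.rel.type.RelDataTypeFieldImpl;
  import org.apache.calcite.rel.type.RelDataTypeSystem;
  import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
  import org.apache.calcite.sql.type.SqlTypeName;
  import org.apache.drill.exec.planner.types.ExtendableRelDataTypeHolder;

  class ExtendableHolderSketch {
    public static void main(String[] args) {
      RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
      RelDataTypeField aString = new RelDataTypeFieldImpl(
          "a_string", 0, typeFactory.createSqlType(SqlTypeName.VARCHAR));

      ExtendableRelDataTypeHolder holder = new ExtendableRelDataTypeHolder(
          ImmutableList.of(aString),                 // fields taken from the table schema
          ImmutableList.of("filename", "suffix"));   // implicit column names

      RelDataTypeField table = holder.getField(typeFactory, "a_string");     // existing field
      RelDataTypeField implicit = holder.getField(typeFactory, "filename");  // added on demand
      RelDataTypeField unknown = holder.getField(typeFactory, "no_such");    // null

      System.out.println(table.getIndex() + " " + implicit.getIndex() + " " + unknown);
    }
  }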

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java
index d332434..6438031 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -37,9 +37,9 @@ import org.apache.calcite.sql.type.SqlTypeName;
 public class RelDataTypeDrillImpl extends DynamicRecordType {
 
     private final RelDataTypeFactory typeFactory;
-    private final RelDataTypeHolder holder;
+    private final AbstractRelDataTypeHolder holder;
 
-    public RelDataTypeDrillImpl(RelDataTypeHolder holder, RelDataTypeFactory typeFactory) {
+    public RelDataTypeDrillImpl(AbstractRelDataTypeHolder holder, RelDataTypeFactory typeFactory) {
         this.typeFactory = typeFactory;
         this.holder = holder;
         this.holder.setRelDataTypeFactory(typeFactory);

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeHolder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeHolder.java
index ad2091c..0291e0f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeHolder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeHolder.java
@@ -19,21 +19,20 @@ package org.apache.drill.exec.planner.types;
 
 import java.util.List;
 
+import com.google.common.collect.Lists;
 import org.apache.calcite.rel.type.DynamicRecordType;
 import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.rel.type.RelDataTypeField;
 import org.apache.calcite.rel.type.RelDataTypeFieldImpl;
 import org.apache.calcite.sql.type.SqlTypeName;
 
-import com.google.common.collect.Lists;
 import org.apache.drill.common.expression.SchemaPath;
 
-public class RelDataTypeHolder {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RelDataTypeHolder.class);
+public class RelDataTypeHolder extends AbstractRelDataTypeHolder {
 
-  private final List<RelDataTypeField> fields = Lists.newArrayList();
-
-  private RelDataTypeFactory typeFactory;
+  public RelDataTypeHolder() {
+    super(Lists.<RelDataTypeField>newArrayList());
+  }
 
   public List<RelDataTypeField> getFieldList(RelDataTypeFactory typeFactory) {
     addStarIfEmpty(typeFactory);
@@ -75,32 +74,4 @@ public class RelDataTypeHolder {
 
     return newField;
   }
-
-  public List<String> getFieldNames() {
-    List<String> fieldNames = Lists.newArrayList();
-    for(RelDataTypeField f : fields){
-      fieldNames.add(f.getName());
-    };
-
-    return fieldNames;
-  }
-
-  public void setRelDataTypeFactory(RelDataTypeFactory typeFactory) {
-    this.typeFactory = typeFactory;
-  }
-
-  @Override
-  public int hashCode() {
-    return System.identityHashCode(this);
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    return (this == obj);
-  }
-
-  private List<RelDataTypeField> getFieldList() {
-    return getFieldList(this.typeFactory);
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/store/ColumnExplorer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ColumnExplorer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ColumnExplorer.java
index 62d46d6..73b96ce 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ColumnExplorer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ColumnExplorer.java
@@ -28,6 +28,7 @@ import org.apache.drill.common.map.CaseInsensitiveMap;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.server.options.OptionManager;
 import org.apache.drill.exec.server.options.OptionValue;
+import org.apache.drill.exec.store.dfs.FileSelection;
 import org.apache.drill.exec.store.dfs.easy.FileWork;
 import org.apache.drill.exec.util.Utilities;
 import org.apache.hadoop.fs.Path;
@@ -79,6 +80,23 @@ public class ColumnExplorer {
   }
 
   /**
+   * Returns a list of implicit column names taken from the specified {@link SchemaConfig}.
+   *
+   * @param schemaConfig the source of session option values
+   * @return list of implicit column names
+   */
+  public static List<String> getImplicitColumnsNames(SchemaConfig schemaConfig) {
+    List<String> implicitColumns = Lists.newArrayList();
+    for (ImplicitFileColumns e : ImplicitFileColumns.values()) {
+      OptionValue optionValue;
+      if ((optionValue = schemaConfig.getOption(e.name)) != null) {
+        implicitColumns.add(optionValue.string_val);
+      }
+    }
+    return implicitColumns;
+  }
+
+  /**
    * Checks if given column is partition or not.
    *
    * @param optionManager options
@@ -105,6 +123,39 @@ public class ColumnExplorer {
   }
 
   /**
+   * Returns a list of partition column names.
+   * When the table has several levels of nesting, the maximum level is used.
+   *
+   * @param selection    the source of file paths
+   * @param schemaConfig the source of the session option value for the partition column label
+   * @return list of partition column names
+   */
+  public static List<String> getPartitionColumnNames(FileSelection selection, SchemaConfig schemaConfig) {
+    int partitionsCount = 0;
+    // a depth of table root path
+    int rootDepth = new Path(selection.getSelectionRoot()).depth();
+
+    for (String file : selection.getFiles()) {
+      // Calculates partitions count for the concrete file:
+      // depth of file path - depth of table root path - 1.
+      // The depth of file path includes file itself,
+      // so we should subtract 1 to consider only directories.
+      int currentPartitionsCount = new Path(file).depth() - rootDepth - 1;
+      // max depth of files path should be used to handle all partitions
+      partitionsCount = Math.max(partitionsCount, currentPartitionsCount);
+    }
+
+    String partitionColumnLabel = schemaConfig.getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL).string_val;
+    List<String> partitions = Lists.newArrayList();
+
+    // generates partition column names: dir0, dir1 etc.
+    for (int i = 0; i < partitionsCount; i++) {
+      partitions.add(partitionColumnLabel + i);
+    }
+    return partitions;
+  }
+
+  /**
    * Compares selection root and actual file path to determine partition columns values.
    * Adds implicit file columns according to columns list.
    *
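
A worked example (not part of the commit) of the depth arithmetic used by
getPartitionColumnNames above; the paths are hypothetical:

  import org.apache.hadoop.fs.Path;

  class PartitionDepthSketch {
    public static void main(String[] args) {
      Path root = new Path("/tmp/avroTable");                  // depth() == 2
      Path file = new Path("/tmp/avroTable/2018/part.avro");   // depth() == 4

      // file depth - root depth - 1, since the file itself is not a partition directory
      int partitionsCount = file.depth() - root.depth() - 1;   // 4 - 2 - 1 = 1

      // with the default label "dir" this produces a single partition column: dir0
      for (int i = 0; i < partitionsCount; i++) {
        System.out.println("dir" + i);
      }
    }
  }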

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroDrillTable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroDrillTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroDrillTable.java
index 63444fc..d89079b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroDrillTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroDrillTable.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -31,6 +31,10 @@ import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.sql.type.SqlTypeName;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.exec.planner.logical.DrillTable;
+import org.apache.drill.exec.planner.logical.ExtendableRelDataType;
+import org.apache.drill.exec.planner.types.ExtendableRelDataTypeHolder;
+import org.apache.drill.exec.store.ColumnExplorer;
+import org.apache.drill.exec.store.SchemaConfig;
 import org.apache.drill.exec.store.dfs.FileSystemPlugin;
 import org.apache.drill.exec.store.dfs.FormatSelection;
 import org.apache.hadoop.fs.Path;
@@ -38,17 +42,20 @@ import org.apache.hadoop.fs.Path;
 import com.google.common.collect.Lists;
 
 public class AvroDrillTable extends DrillTable {
+  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AvroDrillTable.class);
 
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AvroDrillTable.class);
-  private DataFileReader<GenericContainer> reader = null;
+  private final DataFileReader<GenericContainer> reader;
+  private final SchemaConfig schemaConfig;
+  private ExtendableRelDataTypeHolder holder;
 
   public AvroDrillTable(String storageEngineName,
                        FileSystemPlugin plugin,
-                       String userName,
+                       SchemaConfig schemaConfig,
                        FormatSelection selection) {
-    super(storageEngineName, plugin, userName, selection);
+    super(storageEngineName, plugin, schemaConfig.getUserName(), selection);
     List<String> asFiles = selection.getAsFiles();
     Path path = new Path(asFiles.get(0));
+    this.schemaConfig = schemaConfig;
     try {
       reader = new DataFileReader<>(new FsInput(path, plugin.getFsConf()), new GenericDatumReader<GenericContainer>());
     } catch (IOException e) {
@@ -58,16 +65,32 @@ public class AvroDrillTable extends DrillTable {
 
   @Override
   public RelDataType getRowType(RelDataTypeFactory typeFactory) {
-    List<RelDataType> typeList = Lists.newArrayList();
-    List<String> fieldNameList = Lists.newArrayList();
+    // ExtendableRelDataTypeHolder is reused to preserve previously added implicit columns
+    if (holder == null) {
+      List<RelDataType> typeList = Lists.newArrayList();
+      List<String> fieldNameList = Lists.newArrayList();
 
-    Schema schema = reader.getSchema();
-    for (Field field : schema.getFields()) {
-      fieldNameList.add(field.name());
-      typeList.add(getNullableRelDataTypeFromAvroType(typeFactory, field.schema()));
+      // adds partition columns to RowType since they are always present in star queries
+      List<String> partitions =
+          ColumnExplorer.getPartitionColumnNames(((FormatSelection) getSelection()).getSelection(), schemaConfig);
+      for (String partitionName : partitions) {
+        fieldNameList.add(partitionName);
+        typeList.add(typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.VARCHAR), true));
+      }
+
+      // adds non-partition table columns to RowType
+      Schema schema = reader.getSchema();
+      for (Field field : schema.getFields()) {
+        fieldNameList.add(field.name());
+        typeList.add(getNullableRelDataTypeFromAvroType(typeFactory, field.schema()));
+      }
+
+      holder = new ExtendableRelDataTypeHolder(
+          typeFactory.createStructType(typeList, fieldNameList).getFieldList(),
+          ColumnExplorer.getImplicitColumnsNames(schemaConfig));
     }
 
-    return typeFactory.createStructType(typeList, fieldNameList);
+    return new ExtendableRelDataType(holder, typeFactory);
   }
 
   private RelDataType getNullableRelDataTypeFromAvroType(
@@ -133,4 +156,4 @@ public class AvroDrillTable extends DrillTable {
     }
     return relDataType;
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroFormatPlugin.java
index fd6e59b..b9f6690 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroFormatPlugin.java
@@ -30,6 +30,7 @@ import org.apache.drill.exec.proto.UserBitShared.CoreOperatorType;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.store.RecordWriter;
+import org.apache.drill.exec.store.SchemaConfig;
 import org.apache.drill.exec.store.dfs.BasicFormatMatcher;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.drill.exec.store.dfs.FileSelection;
@@ -102,9 +103,10 @@ public class AvroFormatPlugin extends EasyFormatPlugin<AvroFormatConfig> {
     @Override
     public DrillTable isReadable(DrillFileSystem fs,
         FileSelection selection, FileSystemPlugin fsPlugin,
-        String storageEngineName, String userName) throws IOException {
+        String storageEngineName, SchemaConfig schemaConfig) throws IOException {
       if (isFileReadable(fs, selection.getFirstPath(fs))) {
-        return new AvroDrillTable(storageEngineName, fsPlugin, userName, new FormatSelection(plugin.getConfig(), selection));
+        return new AvroDrillTable(storageEngineName, fsPlugin, schemaConfig,
+            new FormatSelection(plugin.getConfig(), selection));
       }
       return null;
     }

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
index 6526073..4104f67 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/BasicFormatMatcher.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -25,6 +25,7 @@ import java.util.regex.Pattern;
 
 import org.apache.drill.exec.planner.logical.DrillTable;
 import org.apache.drill.exec.planner.logical.DynamicDrillTable;
+import org.apache.drill.exec.store.SchemaConfig;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -36,7 +37,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Range;
 
-public class BasicFormatMatcher extends FormatMatcher{
+public class BasicFormatMatcher extends FormatMatcher {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BasicFormatMatcher.class);
 
   protected final FormatPlugin plugin;
@@ -75,9 +76,10 @@ public class BasicFormatMatcher extends FormatMatcher{
   @Override
   public DrillTable isReadable(DrillFileSystem fs,
       FileSelection selection, FileSystemPlugin fsPlugin,
-      String storageEngineName, String userName) throws IOException {
+      String storageEngineName, SchemaConfig schemaConfig) throws IOException {
     if (isFileReadable(fs, selection.getFirstPath(fs))) {
-      return new DynamicDrillTable(fsPlugin, storageEngineName, userName, new FormatSelection(plugin.getConfig(), selection));
+      return new DynamicDrillTable(fsPlugin, storageEngineName, schemaConfig.getUserName(),
+          new FormatSelection(plugin.getConfig(), selection));
     }
     return null;
   }

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatMatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatMatcher.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatMatcher.java
index fa9b4a1..278d5f9 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatMatcher.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatMatcher.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,17 +18,17 @@
 package org.apache.drill.exec.store.dfs;
 
 import org.apache.drill.exec.planner.logical.DrillTable;
+import org.apache.drill.exec.store.SchemaConfig;
 import org.apache.hadoop.fs.FileStatus;
 
 import java.io.IOException;
 
 public abstract class FormatMatcher {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FormatMatcher.class);
 
   public abstract boolean supportDirectoryReads();
   public abstract DrillTable isReadable(DrillFileSystem fs,
       FileSelection selection, FileSystemPlugin fsPlugin,
-      String storageEngineName, String userName) throws IOException;
+      String storageEngineName, SchemaConfig schemaConfig) throws IOException;
   public abstract boolean isFileReadable(DrillFileSystem fs, FileStatus status) throws IOException;
   public abstract FormatPlugin getFormatPlugin();
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
index 7640c13..1ff6324 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
@@ -598,7 +598,7 @@ public class WorkspaceSchemaFactory {
     }
 
     private DrillTable isReadable(FormatMatcher m, FileSelection fileSelection) throws IOException {
-      return m.isReadable(getFS(), fileSelection, plugin, storageEngineName, schemaConfig.getUserName());
+      return m.isReadable(getFS(), fileSelection, plugin, storageEngineName, schemaConfig);
     }
 
     @Override
@@ -619,7 +619,7 @@ public class WorkspaceSchemaFactory {
         if (hasDirectories) {
           for (final FormatMatcher matcher : dirMatchers) {
             try {
-              DrillTable table = matcher.isReadable(getFS(), fileSelection, plugin, storageEngineName, schemaConfig.getUserName());
+              DrillTable table = matcher.isReadable(getFS(), fileSelection, plugin, storageEngineName, schemaConfig);
               if (table != null) {
                 return table;
               }
@@ -637,7 +637,7 @@ public class WorkspaceSchemaFactory {
         }
 
         for (final FormatMatcher matcher : fileMatchers) {
-          DrillTable table = matcher.isReadable(getFS(), newSelection, plugin, storageEngineName, schemaConfig.getUserName());
+          DrillTable table = matcher.isReadable(getFS(), newSelection, plugin, storageEngineName, schemaConfig);
           if (table != null) {
             return table;
           }

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
index e48239b..9b0794d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetFormatPlugin.java
@@ -40,6 +40,7 @@ import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.RecordWriter;
+import org.apache.drill.exec.store.SchemaConfig;
 import org.apache.drill.exec.store.StoragePluginOptimizerRule;
 import org.apache.drill.exec.store.dfs.BasicFormatMatcher;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
@@ -219,7 +220,7 @@ public class ParquetFormatPlugin implements FormatPlugin{
 
     @Override
     public DrillTable isReadable(DrillFileSystem fs, FileSelection selection,
-        FileSystemPlugin fsPlugin, String storageEngineName, String userName)
+        FileSystemPlugin fsPlugin, String storageEngineName, SchemaConfig schemaConfig)
         throws IOException {
       if(selection.containsDirectories(fs)) {
         Path dirMetaPath = new Path(selection.getSelectionRoot(), Metadata.METADATA_DIRECTORIES_FILENAME);
@@ -237,16 +238,16 @@ public class ParquetFormatPlugin implements FormatPlugin{
             dirSelection.setExpandedPartial();
             dirSelection.setMetaContext(metaContext);
 
-            return new DynamicDrillTable(fsPlugin, storageEngineName, userName,
+            return new DynamicDrillTable(fsPlugin, storageEngineName, schemaConfig.getUserName(),
                 new FormatSelection(plugin.getConfig(), dirSelection));
           }
         }
         if(isDirReadable(fs, selection.getFirstPath(fs))) {
-          return new DynamicDrillTable(fsPlugin, storageEngineName, userName,
+          return new DynamicDrillTable(fsPlugin, storageEngineName, schemaConfig.getUserName(),
               new FormatSelection(plugin.getConfig(), selection));
         }
       }
-      return super.isReadable(fs, selection, fsPlugin, storageEngineName, userName);
+      return super.isReadable(fs, selection, fsPlugin, storageEngineName, schemaConfig);
     }
 
     private Path getMetadataPath(FileStatus dir) {

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/main/java/org/apache/drill/exec/store/pcap/PcapFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pcap/PcapFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pcap/PcapFormatPlugin.java
index 65ff238..3fa8aa6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pcap/PcapFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pcap/PcapFormatPlugin.java
@@ -27,6 +27,7 @@ import org.apache.drill.exec.proto.UserBitShared;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.store.RecordWriter;
+import org.apache.drill.exec.store.SchemaConfig;
 import org.apache.drill.exec.store.dfs.BasicFormatMatcher;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.drill.exec.store.dfs.FileSelection;
@@ -96,9 +97,10 @@ public class PcapFormatPlugin extends EasyFormatPlugin<PcapFormatConfig> {
     @Override
     public DrillTable isReadable(DrillFileSystem fs,
                                  FileSelection selection, FileSystemPlugin fsPlugin,
-                                 String storageEngineName, String userName) throws IOException {
+                                 String storageEngineName, SchemaConfig schemaConfig) throws IOException {
       if (isFileReadable(fs, selection.getFirstPath(fs))) {
-        return new PcapDrillTable(storageEngineName, fsPlugin, userName, new FormatSelection(plugin.getConfig(), selection));
+        return new PcapDrillTable(storageEngineName, fsPlugin, schemaConfig.getUserName(),
+            new FormatSelection(plugin.getConfig(), selection));
       }
       return null;
     }

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java b/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
index 472419a..0dc62c6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/PlanningBase.java
@@ -105,7 +105,8 @@ public class PlanningBase extends ExecTest {
     when(context.getLpPersistence()).thenReturn(new LogicalPlanPersistence(config, ClassPathScanner.fromPrescan(config)));
     when(context.getStorage()).thenReturn(registry);
     when(context.getFunctionRegistry()).thenReturn(functionRegistry);
-    when(context.getSession()).thenReturn(UserSession.Builder.newBuilder().setSupportComplexTypes(true).build());
+    when(context.getSession()).thenReturn(
+        UserSession.Builder.newBuilder().withOptionManager(sessionOptions).setSupportComplexTypes(true).build());
     when(context.getCurrentEndpoint()).thenReturn(DrillbitEndpoint.getDefaultInstance());
     when(context.getActiveEndpoints()).thenReturn(ImmutableList.of(DrillbitEndpoint.getDefaultInstance()));
     when(context.getPlannerSettings()).thenReturn(new PlannerSettings(queryOptions, functionRegistry));

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroFormatTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroFormatTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroFormatTest.java
index 6436c1c..930c41b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroFormatTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroFormatTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,6 +18,8 @@
 package org.apache.drill.exec.store.avro;
 
 import com.google.common.collect.Lists;
+import org.apache.commons.io.FileUtils;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.test.TestBuilder;
 import org.apache.drill.common.exceptions.UserException;
@@ -26,6 +28,7 @@ import org.apache.drill.exec.util.JsonStringHashMap;
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.io.File;
 import java.util.List;
 import java.util.Map;
 
@@ -192,6 +195,105 @@ public class AvroFormatTest extends BaseTestQuery {
   }
 
   @Test
+  public void testImplicitColumnsWithStar() throws Exception {
+    AvroTestUtil.AvroTestRecordWriter testWriter = generateSimplePrimitiveSchema_NoNullValues(1);
+    final String file = testWriter.getFileName();
+    // removes "." and ".." from the path
+    String tablePath = new File(testWriter.getFilePath()).getCanonicalPath();
+
+    List<Map<String, Object>> expectedRecords = testWriter.getExpectedRecords();
+    expectedRecords.get(0).put("`filename`", file);
+    expectedRecords.get(0).put("`suffix`", "avro");
+    expectedRecords.get(0).put("`fqn`", tablePath);
+    expectedRecords.get(0).put("`filepath`", new File(tablePath).getParent());
+    try {
+      testBuilder()
+          .sqlQuery("select filename, *, suffix, fqn, filepath from dfs.`%s`", 
file)
+          .unOrdered()
+          .baselineRecords(expectedRecords)
+          .go();
+    } finally {
+      FileUtils.deleteQuietly(new File(tablePath));
+    }
+  }
+
+  @Test
+  public void testImplicitColumnAlone() throws Exception {
+    AvroTestUtil.AvroTestRecordWriter testWriter = generateSimplePrimitiveSchema_NoNullValues(1);
+    final String file = testWriter.getFileName();
+    // removes "." and ".." from the path
+    String tablePath = new File(testWriter.getFilePath()).getCanonicalPath();
+    try {
+      testBuilder()
+          .sqlQuery("select filename from dfs.`%s`", file)
+          .unOrdered()
+          .baselineColumns("filename")
+          .baselineValues(file)
+          .go();
+    } finally {
+      FileUtils.deleteQuietly(new File(tablePath));
+    }
+  }
+
+  @Test
+  public void testImplicitColumnInWhereClause() throws Exception {
+    AvroTestUtil.AvroTestRecordWriter testWriter = generateSimplePrimitiveSchema_NoNullValues(1);
+    final String file = testWriter.getFileName();
+    // removes "." and ".." from the path
+    String tablePath = new File(testWriter.getFilePath()).getCanonicalPath();
+
+    List<Map<String, Object>> expectedRecords = testWriter.getExpectedRecords();
+    try {
+      testBuilder()
+          .sqlQuery("select * from dfs.`%1$s` where filename = '%1$s'", file)
+          .unOrdered()
+          .baselineRecords(expectedRecords)
+          .go();
+    } finally {
+      FileUtils.deleteQuietly(new File(tablePath));
+    }
+  }
+
+  @Test
+  public void testPartitionColumn() throws Exception {
+    setSessionOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL, "directory");
+    String file = "avroTable";
+    String partitionColumn = "2018";
+    AvroTestUtil.AvroTestRecordWriter testWriter =
+        generateSimplePrimitiveSchema_NoNullValues(1, FileUtils.getFile(file, partitionColumn).getPath());
+    try {
+      testBuilder()
+          .sqlQuery("select directory0 from dfs.`%s`", file)
+          .unOrdered()
+          .baselineColumns("directory0")
+          .baselineValues(partitionColumn)
+          .go();
+    } finally {
+      FileUtils.deleteQuietly(new File(testWriter.getFilePath()));
+      resetSessionOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL);
+    }
+  }
+
+  @Test
+  public void testSelectAllWithPartitionColumn() throws Exception {
+    String file = "avroTable";
+    String partitionColumn = "2018";
+    AvroTestUtil.AvroTestRecordWriter testWriter =
+      generateSimplePrimitiveSchema_NoNullValues(1, FileUtils.getFile(file, partitionColumn).getPath());
+    List<Map<String, Object>> expectedRecords = testWriter.getExpectedRecords();
+    expectedRecords.get(0).put("`dir0`", partitionColumn);
+    try {
+      testBuilder()
+          .sqlQuery("select * from dfs.`%s`", file)
+          .unOrdered()
+          .baselineRecords(expectedRecords)
+          .go();
+    } finally {
+      FileUtils.deleteQuietly(new File(testWriter.getFilePath()));
+    }
+  }
+
+  @Test
   public void testSimpleArraySchema_NoNullValues() throws Exception {
     final String file = generateSimpleArraySchema_NoNullValues().getFileName();
     final String sql = "select a_string, c_string_array[0], e_float_array[2] from dfs.`%s`";

http://git-wip-us.apache.org/repos/asf/drill/blob/4652b0ba/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java
index 1cb9284..11b3d39 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -21,6 +21,7 @@ import java.io.Closeable;
 import java.io.File;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -151,6 +152,18 @@ public class AvroTestUtil {
   }
 
   public static AvroTestRecordWriter generateSimplePrimitiveSchema_NoNullValues(int numRecords) throws Exception {
+    return generateSimplePrimitiveSchema_NoNullValues(numRecords, "");
+  }
+
+  /**
+   * Generates an Avro table with the specified number of rows at the specified path.
+   *
+   * @param numRecords number of rows in the table
+   * @param tablePath  table path
+   * @return AvroTestRecordWriter instance
+   */
+  public static AvroTestRecordWriter generateSimplePrimitiveSchema_NoNullValues(int numRecords, String tablePath)
+      throws Exception {
     final Schema schema = SchemaBuilder.record("AvroRecordReaderTest")
             .namespace("org.apache.drill.exec.store.avro")
             .fields()
@@ -164,7 +177,8 @@ public class AvroTestUtil {
             .name("h_boolean").type().booleanType().noDefault()
             .endRecord();
 
-    final File file = File.createTempFile("avro-primitive-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
+    final File file = File.createTempFile("avro-primitive-test", ".avro",
+        BaseTestQuery.dirTestWatcher.makeRootSubDir(Paths.get(tablePath)));
     final AvroTestRecordWriter record = new AvroTestRecordWriter(schema, file);
 
     try {
