DRILL-4327: Fix rawtypes warnings in drill codebase

Fixes most raw-type warnings across the Drill modules by adding proper generic type parameters and replacing raw collection/class references with parameterized equivalents.

Closes #347


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/2ffe3117
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/2ffe3117
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/2ffe3117

Branch: refs/heads/master
Commit: 2ffe311739cda1942f2a74a27702cdd38e07ed3b
Parents: 5118d0c
Author: Laurent Goujon <[email protected]>
Authored: Wed Jan 27 19:01:13 2016 -0800
Committer: vkorukanti <[email protected]>
Committed: Wed Mar 2 14:08:51 2016 -0800

----------------------------------------------------------------------
 ...onvertHiveParquetScanToDrillParquetScan.java |  8 +-
 .../store/hive/DrillHiveMetaStoreClient.java    |  2 +-
 .../store/hive/HiveAuthorizationHelper.java     |  6 +-
 .../exec/store/hive/HiveMetadataProvider.java   |  2 +-
 .../drill/exec/store/hive/HiveRecordReader.java | 14 +--
 .../apache/drill/exec/store/hive/HiveTable.java |  2 +-
 .../drill/exec/store/hive/HiveUtilities.java    |  6 +-
 .../hive/TestStorageBasedHiveAuthorization.java |  6 +-
 .../drill/exec/store/kudu/KuduRecordReader.java |  1 -
 .../exec/store/kudu/KuduSchemaFactory.java      |  2 +-
 .../drill/exec/cache/DistributedCache.java      |  2 +-
 .../apache/drill/exec/expr/CodeGenerator.java   |  2 +-
 .../drill/exec/expr/EvaluationVisitor.java      |  7 +-
 .../fn/interpreter/InterpreterEvaluator.java    | 14 +--
 .../org/apache/drill/exec/ops/UdfUtilities.java |  8 +-
 .../exec/physical/base/AbstractExchange.java    |  9 +-
 .../drill/exec/physical/impl/ImplCreator.java   |  2 +-
 .../impl/mergereceiver/MergingRecordBatch.java  | 11 +--
 .../partitionsender/PartitionerDecorator.java   | 10 +--
 .../impl/producer/ProducerConsumerBatch.java    |  3 +-
 .../partition/RewriteAsBinaryOperators.java     | 11 +--
 .../drill/exec/planner/physical/PrelUtil.java   |  2 +-
 .../visitor/RewriteProjectToFlatten.java        | 24 ++---
 .../visitor/RexVisitorComplexExprSplitter.java  | 19 ++--
 .../visitor/SplitUpComplexExpressions.java      | 33 ++++---
 .../drill/exec/planner/sql/DrillSqlWorker.java  | 28 +++---
 .../types/DrillFixedRelDataTypeImpl.java        |  2 +-
 .../planner/types/RelDataTypeDrillImpl.java     |  2 +-
 .../exec/record/ExpandableHyperContainer.java   | 18 ++--
 .../drill/exec/record/HyperVectorWrapper.java   |  9 +-
 .../drill/exec/record/SimpleVectorWrapper.java  | 16 +---
 .../apache/drill/exec/schema/DataRecord.java    |  2 +-
 .../drill/exec/server/rest/StatusResources.java |  4 +-
 .../drill/exec/store/avro/AvroRecordReader.java | 17 ++--
 .../drill/exec/store/mock/MockRecordReader.java |  2 +-
 .../drill/exec/store/parquet/Metadata.java      |  3 +-
 .../exec/store/parquet/ParquetGroupScan.java    | 27 +++---
 .../store/parquet/columnreaders/BitReader.java  |  8 +-
 .../columnreaders/ColumnReaderFactory.java      | 37 ++++----
 .../columnreaders/FixedByteAlignedReader.java   | 57 +++++-------
 .../columnreaders/FixedWidthRepeatedReader.java | 27 ++----
 .../columnreaders/NullableBitReader.java        |  8 +-
 .../NullableFixedByteAlignedReaders.java        | 66 +++++---------
 .../store/parquet/columnreaders/PageReader.java | 13 +--
 .../ParquetFixedWidthDictionaryReaders.java     | 79 +++++-----------
 .../columnreaders/ParquetRecordReader.java      | 10 +--
 .../columnreaders/VarLenBinaryReader.java       | 16 ++--
 .../parquet/columnreaders/VarLengthColumn.java  |  3 +-
 .../exec/store/schedule/AssignmentCreator.java  | 18 ++--
 .../drill/exec/store/sys/SystemTable.java       |  2 +-
 .../org/apache/drill/exec/vector/CopyUtil.java  |  2 +-
 .../parquet/hadoop/ColumnChunkIncReadStore.java | 12 ++-
 .../java/org/apache/drill/DrillTestWrapper.java | 77 ++++++++--------
 .../test/java/org/apache/drill/TestBuilder.java | 34 ++++---
 .../org/apache/drill/TestFrameworkTest.java     |  7 +-
 .../drill/exec/HyperVectorValueIterator.java    | 10 +--
 .../exec/physical/impl/flatten/TestFlatten.java | 11 +--
 .../exec/physical/impl/join/TestMergeJoin.java  |  8 +-
 .../drill/exec/physical/impl/sort/TestSort.java |  4 +-
 .../exec/planner/TestDirectoryExplorerUDFs.java |  2 +-
 .../exec/record/vector/TestValueVector.java     |  9 +-
 .../exec/store/TestAffinityCalculator.java      |  4 +-
 .../drill/exec/store/avro/AvroTestUtil.java     | 95 ++++++++++----------
 .../exec/store/parquet/TestFileGenerator.java   | 22 +++--
 .../org/apache/drill/exec/testing/Controls.java | 15 ++--
 .../exec/testing/ControlsInjectionUtil.java     | 18 ++--
 .../org/apache/drill/jdbc/ITTestShadedJar.java  |  4 +-
 .../org/apache/drill/exec/rpc/BasicClient.java  |  2 +-
 .../org/apache/drill/exec/rpc/BasicServer.java  |  4 +-
 .../main/codegen/templates/BasicTypeHelper.java |  2 +-
 .../src/main/codegen/templates/UnionVector.java |  2 +-
 .../drill/exec/util/JsonStringArrayList.java    |  2 +-
 .../drill/exec/util/JsonStringHashMap.java      |  2 +-
 .../apache/drill/exec/vector/ZeroVector.java    |  2 +-
 .../vector/complex/impl/PromotableWriter.java   |  8 +-
 .../apache/drill/common/logical/data/Limit.java |  2 +-
 .../common/logical/data/LogicalOperator.java    |  2 +-
 .../logical/data/LogicalOperatorBase.java       |  2 +-
 78 files changed, 471 insertions(+), 573 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java
----------------------------------------------------------------------
diff --git 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java
 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java
index 97a5b98..f339957 100644
--- 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java
+++ 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java
@@ -97,7 +97,7 @@ public class ConvertHiveParquetScanToDrillParquetScan extends 
StoragePluginOptim
     final HiveConf hiveConf = hiveScan.getHiveConf();
     final Table hiveTable = hiveScan.hiveReadEntry.getTable();
 
-    final Class<? extends InputFormat> tableInputFormat =
+    final Class<? extends InputFormat<?,?>> tableInputFormat =
         getInputFormatFromSD(MetaStoreUtils.getTableMetadata(hiveTable), 
hiveScan.hiveReadEntry, hiveTable.getSd(),
             hiveConf);
     if (tableInputFormat == null || 
!tableInputFormat.equals(MapredParquetInputFormat.class)) {
@@ -113,7 +113,7 @@ public class ConvertHiveParquetScanToDrillParquetScan 
extends StoragePluginOptim
     // Make sure all partitions have the same input format as the table input 
format
     for (HivePartition partition : partitions) {
       final StorageDescriptor partitionSD = partition.getPartition().getSd();
-      Class<? extends InputFormat> inputFormat = getInputFormatFromSD(
+      Class<? extends InputFormat<?, ?>> inputFormat = getInputFormatFromSD(
           HiveUtilities.getPartitionMetadata(partition.getPartition(), 
hiveTable), hiveScan.hiveReadEntry, partitionSD,
           hiveConf);
       if (inputFormat == null || !inputFormat.equals(tableInputFormat)) {
@@ -142,13 +142,13 @@ public class ConvertHiveParquetScanToDrillParquetScan 
extends StoragePluginOptim
    * @param sd
    * @return {@link InputFormat} class or null if a failure has occurred. 
Failure is logged as warning.
    */
-  private Class<? extends InputFormat> getInputFormatFromSD(final Properties 
properties,
+  private Class<? extends InputFormat<?, ?>> getInputFormatFromSD(final 
Properties properties,
       final HiveReadEntry hiveReadEntry, final StorageDescriptor sd, final 
HiveConf hiveConf) {
     final Table hiveTable = hiveReadEntry.getTable();
     try {
       final String inputFormatName = sd.getInputFormat();
       if (!Strings.isNullOrEmpty(inputFormatName)) {
-        return (Class<? extends InputFormat>) Class.forName(inputFormatName);
+        return (Class<? extends InputFormat<?, ?>>) 
Class.forName(inputFormatName);
       }
 
       final JobConf job = new JobConf(hiveConf);

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java
 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java
index 17e3478..df3e8a2 100644
--- 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java
+++ 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java
@@ -154,7 +154,7 @@ public abstract class DrillHiveMetaStoreClient extends 
HiveMetaStoreClient {
       logger.warn("Hive metastore cache expire policy is set to {}", 
expireAfterWrite? "expireAfterWrite" : "expireAfterAccess");
     }
 
-    final CacheBuilder cacheBuilder = CacheBuilder
+    final CacheBuilder<Object, Object> cacheBuilder = CacheBuilder
         .newBuilder();
 
     if (expireAfterWrite) {

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveAuthorizationHelper.java
----------------------------------------------------------------------
diff --git 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveAuthorizationHelper.java
 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveAuthorizationHelper.java
index 643b121..4c8b815 100644
--- 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveAuthorizationHelper.java
+++ 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveAuthorizationHelper.java
@@ -102,7 +102,7 @@ public class HiveAuthorizationHelper {
       return;
     }
 
-    authorize(HiveOperationType.SHOWDATABASES, Collections.EMPTY_LIST, 
Collections.EMPTY_LIST, "SHOW DATABASES");
+    authorize(HiveOperationType.SHOWDATABASES, 
Collections.<HivePrivilegeObject> emptyList(), 
Collections.<HivePrivilegeObject> emptyList(), "SHOW DATABASES");
   }
 
   /**
@@ -117,7 +117,7 @@ public class HiveAuthorizationHelper {
 
     final HivePrivilegeObject toRead = new 
HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, dbName, null);
 
-    authorize(HiveOperationType.SHOWTABLES, ImmutableList.of(toRead), 
Collections.EMPTY_LIST, "SHOW TABLES");
+    authorize(HiveOperationType.SHOWTABLES, ImmutableList.of(toRead), 
Collections.<HivePrivilegeObject> emptyList(), "SHOW TABLES");
   }
 
   /**
@@ -132,7 +132,7 @@ public class HiveAuthorizationHelper {
     }
 
     HivePrivilegeObject toRead = new 
HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, dbName, tableName);
-    authorize(HiveOperationType.QUERY, ImmutableList.of(toRead), 
Collections.EMPTY_LIST, "READ TABLE");
+    authorize(HiveOperationType.QUERY, ImmutableList.of(toRead), 
Collections.<HivePrivilegeObject> emptyList(), "READ TABLE");
   }
 
   /* Helper method to check privileges */

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveMetadataProvider.java
----------------------------------------------------------------------
diff --git 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveMetadataProvider.java
 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveMetadataProvider.java
index 3ecc831..49f7689 100644
--- 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveMetadataProvider.java
+++ 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveMetadataProvider.java
@@ -249,7 +249,7 @@ public class HiveMetadataProvider {
 
           if (fs.exists(path)) {
             FileInputFormat.addInputPath(job, path);
-            final InputFormat format = job.getInputFormat();
+            final InputFormat<?, ?> format = job.getInputFormat();
             for (final InputSplit split : format.getSplits(job, 1)) {
               splits.add(new InputSplitWrapper(split, partition));
             }

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
----------------------------------------------------------------------
diff --git 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
index 1634187..8631b8d 100644
--- 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
+++ 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
@@ -102,7 +102,7 @@ public class HiveRecordReader extends AbstractRecordReader {
   private Converter partTblObjectInspectorConverter;
 
   protected Object key;
-  protected RecordReader reader;
+  protected RecordReader<Object, Object> reader;
   protected List<ValueVector> vectors = Lists.newArrayList();
   protected List<ValueVector> pVectors = Lists.newArrayList();
   protected boolean empty;
@@ -223,7 +223,7 @@ public class HiveRecordReader extends AbstractRecordReader {
 
     if (!empty) {
       try {
-        reader = job.getInputFormat().getRecordReader(inputSplit, job, 
Reporter.NULL);
+        reader = (org.apache.hadoop.mapred.RecordReader<Object, Object>) 
job.getInputFormat().getRecordReader(inputSplit, job, Reporter.NULL);
       } catch (Exception e) {
         throw new ExecutionSetupException("Failed to get 
o.a.hadoop.mapred.RecordReader from Hive InputFormat", e);
       }
@@ -236,8 +236,8 @@ public class HiveRecordReader extends AbstractRecordReader {
    * Utility method which creates a SerDe object for given SerDe class name 
and properties.
    */
   private static SerDe createSerDe(final JobConf job, final String sLib, final 
Properties properties) throws Exception {
-    final Class<?> c = Class.forName(sLib);
-    final SerDe serde = (SerDe) c.getConstructor().newInstance();
+    final Class<? extends SerDe> c = 
Class.forName(sLib).asSubclass(SerDe.class);
+    final SerDe serde = c.getConstructor().newInstance();
     serde.initialize(job, properties);
 
     return serde;
@@ -252,7 +252,7 @@ public class HiveRecordReader extends AbstractRecordReader {
   }
 
   @Override
-  public void setup(@SuppressWarnings("unused") OperatorContext context, 
OutputMutator output)
+  public void setup(OperatorContext context, OutputMutator output)
       throws ExecutionSetupException {
     // initializes "reader"
     final Callable<Void> readerInitializer = new Callable<Void>() {
@@ -279,14 +279,14 @@ public class HiveRecordReader extends 
AbstractRecordReader {
       for (int i = 0; i < selectedColumnNames.size(); i++) {
         MajorType type = 
HiveUtilities.getMajorTypeFromHiveTypeInfo(selectedColumnTypes.get(i), options);
         MaterializedField field = 
MaterializedField.create(selectedColumnNames.get(i), type);
-        Class vvClass = TypeHelper.getValueVectorClass(type.getMinorType(), 
type.getMode());
+        Class<? extends ValueVector> vvClass = 
TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode());
         vectors.add(output.addField(field, vvClass));
       }
 
       for (int i = 0; i < selectedPartitionNames.size(); i++) {
         MajorType type = 
HiveUtilities.getMajorTypeFromHiveTypeInfo(selectedPartitionTypes.get(i), 
options);
         MaterializedField field = 
MaterializedField.create(selectedPartitionNames.get(i), type);
-        Class vvClass = 
TypeHelper.getValueVectorClass(field.getType().getMinorType(), 
field.getDataMode());
+        Class<? extends ValueVector> vvClass = 
TypeHelper.getValueVectorClass(field.getType().getMinorType(), 
field.getDataMode());
         pVectors.add(output.addField(field, vvClass));
       }
     } catch(SchemaChangeException e) {

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java
----------------------------------------------------------------------
diff --git 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java
 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java
index 88fe8c3..b6dd079 100644
--- 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java
+++ 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java
@@ -66,7 +66,7 @@ public class HiveTable {
   public String tableType;
 
   @JsonIgnore
-  public final Map<String, String> partitionNameTypeMap = new HashMap();
+  public final Map<String, String> partitionNameTypeMap = new HashMap<>();
 
   @JsonCreator
   public HiveTable(@JsonProperty("tableName") String tableName, 
@JsonProperty("dbName") String dbName, @JsonProperty("owner") String owner, 
@JsonProperty("createTime") int createTime,

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
----------------------------------------------------------------------
diff --git 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
index e75afae..98f0e58 100644
--- 
a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
+++ 
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
@@ -364,7 +364,7 @@ public class HiveUtilities {
    * @param table Table object
    * @throws Exception
    */
-  public static Class<? extends InputFormat> getInputFormatClass(final JobConf 
job, final StorageDescriptor sd,
+  public static Class<? extends InputFormat<?, ?>> getInputFormatClass(final 
JobConf job, final StorageDescriptor sd,
       final Table table) throws Exception {
     final String inputFormatName = sd.getInputFormat();
     if (Strings.isNullOrEmpty(inputFormatName)) {
@@ -374,9 +374,9 @@ public class HiveUtilities {
             "InputFormat class explicitly specified nor StorageHandler class");
       }
       final HiveStorageHandler storageHandler = 
HiveUtils.getStorageHandler(job, storageHandlerClass);
-      return storageHandler.getInputFormatClass();
+      return (Class<? extends InputFormat<?, ?>>) 
storageHandler.getInputFormatClass();
     } else {
-      return (Class<? extends InputFormat>) Class.forName(inputFormatName);
+      return (Class<? extends InputFormat<?, ?>>) 
Class.forName(inputFormatName) ;
     }
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java
----------------------------------------------------------------------
diff --git 
a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java
 
b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java
index 6f5c24e..21559c9 100644
--- 
a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java
+++ 
b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java
@@ -265,7 +265,7 @@ public class TestStorageBasedHiveAuthorization extends 
BaseTestHiveImpersonation
             u0_voter_all_755
         ));
 
-    showTablesHelper(db_u1g1_only, Collections.EMPTY_LIST);
+    showTablesHelper(db_u1g1_only, Collections.<String>emptyList());
   }
 
   @Test
@@ -289,7 +289,7 @@ public class TestStorageBasedHiveAuthorization extends 
BaseTestHiveImpersonation
             u1g1_voter_u1_700
         ));
 
-    showTablesHelper(db_u0_only, Collections.EMPTY_LIST);
+    showTablesHelper(db_u0_only, Collections.<String>emptyList());
   }
 
   @Test
@@ -309,7 +309,7 @@ public class TestStorageBasedHiveAuthorization extends 
BaseTestHiveImpersonation
             u1g1_voter_all_755
         ));
 
-    showTablesHelper(db_u0_only, Collections.EMPTY_LIST);
+    showTablesHelper(db_u0_only, Collections.<String>emptyList());
   }
 
   // Try to read the tables "user0" has access to read in db_general.

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduRecordReader.java
----------------------------------------------------------------------
diff --git 
a/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduRecordReader.java
 
b/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduRecordReader.java
index abd2ab7..541daa4 100644
--- 
a/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduRecordReader.java
+++ 
b/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduRecordReader.java
@@ -170,7 +170,6 @@ public class KuduRecordReader extends AbstractRecordReader {
     return rowCount;
   }
 
-  @SuppressWarnings("unchecked")
   private void initCols(Schema schema) throws SchemaChangeException {
     ImmutableList.Builder<ProjectedColumnInfo> pciBuilder = 
ImmutableList.builder();
 

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduSchemaFactory.java
----------------------------------------------------------------------
diff --git 
a/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduSchemaFactory.java
 
b/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduSchemaFactory.java
index af2775d..34e5b2a 100644
--- 
a/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduSchemaFactory.java
+++ 
b/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduSchemaFactory.java
@@ -96,7 +96,7 @@ public class KuduSchemaFactory implements SchemaFactory {
         return Sets.newHashSet(tablesList.getTablesList());
       } catch (Exception e) {
         logger.warn("Failure reading kudu tables.", e);
-        return Collections.EMPTY_SET;
+        return Collections.emptySet();
       }
     }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java
index 019f9ee..b0a9c3e 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java
@@ -112,7 +112,7 @@ public interface DistributedCache extends AutoCloseable{
       if (getClass() != obj.getClass()) {
         return false;
       }
-      CacheConfig other = (CacheConfig) obj;
+      CacheConfig<?, ?> other = (CacheConfig<?, ?>) obj;
       if (keyClass == null) {
         if (other.keyClass != null) {
           return false;

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java
index 457b466..bdd1a5c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java
@@ -145,7 +145,7 @@ public class CodeGenerator<T> {
     if (getClass() != obj.getClass()){
       return false;
     }
-    CodeGenerator other = (CodeGenerator) obj;
+    CodeGenerator<?> other = (CodeGenerator<?>) obj;
     if (definition == null) {
       if (other.definition != null){
         return false;

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
index 3d38ac9..055ab84 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
@@ -24,7 +24,6 @@ import java.util.Map;
 import java.util.Set;
 import java.util.Stack;
 
-import com.google.common.collect.Maps;
 import org.apache.drill.common.expression.BooleanOperator;
 import org.apache.drill.common.expression.CastExpression;
 import org.apache.drill.common.expression.ConvertExpression;
@@ -54,7 +53,6 @@ import 
org.apache.drill.common.expression.ValueExpressions.QuotedString;
 import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
 import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
 import org.apache.drill.common.expression.visitors.AbstractExprVisitor;
-import org.apache.drill.common.expression.visitors.ExprVisitor;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
@@ -70,6 +68,7 @@ import org.apache.drill.exec.vector.ValueHolderHelper;
 import org.apache.drill.exec.vector.complex.reader.FieldReader;
 
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import com.sun.codemodel.JBlock;
 import com.sun.codemodel.JClass;
 import com.sun.codemodel.JConditional;
@@ -133,11 +132,11 @@ public class EvaluationVisitor {
 
   Map<ExpressionHolder,HoldingContainer> previousExpressions = 
Maps.newHashMap();
 
-  Stack<Map<ExpressionHolder,HoldingContainer>> mapStack = new Stack();
+  Stack<Map<ExpressionHolder,HoldingContainer>> mapStack = new Stack<>();
 
   void newScope() {
     mapStack.push(previousExpressions);
-    previousExpressions = new HashMap(previousExpressions);
+    previousExpressions = new HashMap<>(previousExpressions);
   }
 
   void leaveScope() {

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java
index 0121c80..d3a5573 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java
@@ -17,10 +17,10 @@
  */
 package org.apache.drill.exec.expr.fn.interpreter;
 
-import com.google.common.base.Preconditions;
-import io.netty.buffer.DrillBuf;
-
 import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+
+import javax.inject.Inject;
 
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.expression.BooleanOperator;
@@ -52,8 +52,9 @@ import org.apache.drill.exec.record.VectorAccessible;
 import org.apache.drill.exec.vector.ValueHolderHelper;
 import org.apache.drill.exec.vector.ValueVector;
 
-import javax.inject.Inject;
-import java.lang.reflect.Method;
+import com.google.common.base.Preconditions;
+
+import io.netty.buffer.DrillBuf;
 
 public class InterpreterEvaluator {
 
@@ -111,7 +112,7 @@ public class InterpreterEvaluator {
         for (Field f : fields) {
           if ( f.getAnnotation(Inject.class) != null ) {
             f.setAccessible(true);
-            Class fieldType = f.getType();
+            Class<?> fieldType = f.getType();
             if (UdfUtilities.INJECTABLE_GETTER_METHODS.get(fieldType) != null) 
{
               Method method = 
udfUtilities.getClass().getMethod(UdfUtilities.INJECTABLE_GETTER_METHODS.get(fieldType));
               f.set(interpreter, method.invoke(udfUtilities));
@@ -427,7 +428,6 @@ public class InterpreterEvaluator {
     private ValueHolder visitBooleanAnd(BooleanOperator op, Integer inIndex) {
       ValueHolder [] args = new ValueHolder [op.args.size()];
       boolean hasNull = false;
-      ValueHolder out = null;
       for (int i = 0; i < op.args.size(); i++) {
         args[i] = op.args.get(i).accept(this, inIndex);
 

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/ops/UdfUtilities.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/UdfUtilities.java 
b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/UdfUtilities.java
index 9c91331..5df2b1b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/UdfUtilities.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/UdfUtilities.java
@@ -17,9 +17,11 @@
  
******************************************************************************/
 package org.apache.drill.exec.ops;
 
+import org.apache.drill.exec.store.PartitionExplorer;
+
 import com.google.common.collect.ImmutableMap;
+
 import io.netty.buffer.DrillBuf;
-import org.apache.drill.exec.store.PartitionExplorer;
 
 /**
  * Defines the query state and shared resources available to UDFs through
@@ -31,8 +33,8 @@ public interface UdfUtilities {
 
   // Map between injectable classes and their respective getter methods
   // used for code generation
-  public static final ImmutableMap<Class, String> INJECTABLE_GETTER_METHODS =
-      new ImmutableMap.Builder<Class, String>()
+  public static final ImmutableMap<Class<?>, String> INJECTABLE_GETTER_METHODS 
=
+      new ImmutableMap.Builder<Class<?>, String>()
           .put(DrillBuf.class, "getManagedBuffer")
           .put(PartitionExplorer.class, "getPartitionExplorer")
           .put(ContextInformation.class, "getContextInformation")

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractExchange.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractExchange.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractExchange.java
index 5fbe838..688482d 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractExchange.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractExchange.java
@@ -21,14 +21,15 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Maps;
 import org.apache.drill.exec.physical.EndpointAffinity;
 import org.apache.drill.exec.physical.PhysicalOperatorSetupException;
 import org.apache.drill.exec.planner.fragment.ParallelizationInfo;
 import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
 
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Maps;
+
 public abstract class AbstractExchange extends AbstractSingle implements 
Exchange {
   static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(AbstractExchange.class);
 
@@ -93,7 +94,7 @@ public abstract class AbstractExchange extends AbstractSingle 
implements Exchang
       }
     }
 
-    return new ArrayList(affinityMap.values());
+    return new ArrayList<>(affinityMap.values());
   }
 
   protected void setupSenders(List<DrillbitEndpoint> senderLocations) throws 
PhysicalOperatorSetupException {

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java
index 6fd6ce2..8a8a1ae 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java
@@ -152,7 +152,7 @@ public class ImplCreator {
 
   /** Helper method to get OperatorCreator (RootCreator or BatchCreator) for 
given PhysicalOperator (root or non-root) */
   private Object getOpCreator(PhysicalOperator op, final FragmentContext 
context) throws ExecutionSetupException {
-    final Class opClass = op.getClass();
+    final Class<? extends PhysicalOperator> opClass = op.getClass();
     Object opCreator = 
context.getDrillbitContext().getOperatorCreatorRegistry().getOperatorCreator(opClass);
     if (opCreator == null) {
       throw new UnsupportedOperationException(

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
index 0049059..b1679e5 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
@@ -1,5 +1,3 @@
-package org.apache.drill.exec.physical.impl.mergereceiver;
-
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -17,8 +15,7 @@ package org.apache.drill.exec.physical.impl.mergereceiver;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
-import io.netty.buffer.ByteBuf;
+package org.apache.drill.exec.physical.impl.mergereceiver;
 
 import java.io.IOException;
 import java.util.Comparator;
@@ -83,6 +80,9 @@ import com.google.common.collect.Lists;
 import com.sun.codemodel.JConditional;
 import com.sun.codemodel.JExpr;
 
+
+import io.netty.buffer.ByteBuf;
+
 /**
  * The MergingRecordBatch merges pre-sorted record batches from remote senders.
  */
@@ -312,6 +312,7 @@ public class MergingRecordBatch extends 
AbstractRecordBatch<MergingReceiverPOP>
 
       // allocate the priority queue with the generated comparator
       this.pqueue = new PriorityQueue<>(fragProviders.length, new 
Comparator<Node>() {
+        @Override
         public int compare(final Node node1, final Node node2) {
           final int leftIndex = (node1.batchId << 16) + node1.valueIndex;
           final int rightIndex = (node2.batchId << 16) + node2.valueIndex;
@@ -663,7 +664,7 @@ public class MergingRecordBatch extends 
AbstractRecordBatch<MergingReceiverPOP>
   GeneratorMapping COPIER_MAPPING = new GeneratorMapping("doSetup", "doCopy", 
null, null);
   public final MappingSet COPIER_MAPPING_SET = new MappingSet(COPIER_MAPPING, 
COPIER_MAPPING);
 
-  private void generateComparisons(final ClassGenerator g, final 
VectorAccessible batch) throws SchemaChangeException {
+  private void generateComparisons(final ClassGenerator<?> g, final 
VectorAccessible batch) throws SchemaChangeException {
     g.setMappingSet(MAIN_MAPPING);
 
     for (final Ordering od : popConfig.getOrderings()) {

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionerDecorator.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionerDecorator.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionerDecorator.java
index b1468a1..042222a 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionerDecorator.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionerDecorator.java
@@ -26,13 +26,13 @@ import java.util.concurrent.Future;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.OperatorStats;
 import org.apache.drill.exec.record.RecordBatch;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Lists;
 import org.apache.drill.exec.testing.ControlsInjector;
 import org.apache.drill.exec.testing.ControlsInjectorFactory;
 import org.apache.drill.exec.testing.CountDownLatchInjection;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.Lists;
+
 /**
  * Decorator class to hide multiple Partitioner existence from the caller
  * since this class involves multithreaded processing of incoming batches
@@ -151,7 +151,7 @@ public class PartitionerDecorator {
     stats.startWait();
     final CountDownLatch latch = new CountDownLatch(partitioners.size());
     final List<CustomRunnable> runnables = Lists.newArrayList();
-    final List<Future> taskFutures = Lists.newArrayList();
+    final List<Future<?>> taskFutures = Lists.newArrayList();
     CountDownLatchInjection testCountDownLatch = null;
     try {
       // To simulate interruption of main fragment thread and interrupting the 
partition threads, create a
@@ -179,7 +179,7 @@ public class PartitionerDecorator {
           // If the fragment state says we shouldn't continue, cancel or 
interrupt partitioner threads
           if (!context.shouldContinue()) {
             logger.debug("Interrupting partioner threads. Fragment thread {}", 
tName);
-            for(Future f : taskFutures) {
+            for(Future<?> f : taskFutures) {
               f.cancel(true);
             }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
index 85844c0..589754f 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java
@@ -25,7 +25,6 @@ import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.expr.TypeHelper;
-import org.apache.drill.exec.exception.OutOfMemoryException;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.physical.config.ProducerConsumer;
 import org.apache.drill.exec.physical.impl.sort.RecordBatchData;
@@ -39,7 +38,7 @@ import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.record.VectorWrapper;
 import org.apache.drill.exec.vector.ValueVector;
 
-public class ProducerConsumerBatch extends AbstractRecordBatch {
+public class ProducerConsumerBatch extends 
AbstractRecordBatch<ProducerConsumer> {
   private static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(ProducerConsumerBatch.class);
 
   private final RecordBatch incoming;

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/RewriteAsBinaryOperators.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/RewriteAsBinaryOperators.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/RewriteAsBinaryOperators.java
index 95432c9..04ab23e 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/RewriteAsBinaryOperators.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/RewriteAsBinaryOperators.java
@@ -17,8 +17,9 @@
   */
 package org.apache.drill.exec.planner.logical.partition;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rex.RexBuilder;
 import org.apache.calcite.rex.RexCall;
@@ -35,8 +36,8 @@ import org.apache.calcite.rex.RexVisitorImpl;
 import org.apache.calcite.sql.SqlKind;
 import org.apache.calcite.sql.SqlOperator;
 
-import java.util.ArrayList;
-import java.util.List;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
 
 /**
  * Rewrites an expression tree, replacing OR and AND operators with more than 
2 operands with a chained operators
@@ -81,7 +82,7 @@ import java.util.List;
     RelDataType type = call.getType();
     if (kind == SqlKind.OR || kind == SqlKind.AND) {
       if (call.getOperands().size() > 2) {
-        List<RexNode> children = new ArrayList(call.getOperands());
+        List<RexNode> children = new ArrayList<>(call.getOperands());
         RexNode left = children.remove(0).accept(this);
         RexNode right = builder.makeCall(type, op, children).accept(this);
         return builder.makeCall(type, op, ImmutableList.of(left, right));

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java
index 6605020..c69bb5f 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java
@@ -224,7 +224,7 @@ public class PrelUtil {
   }
 
   // Simple visitor class to determine the last used reference in the 
expression
-  private static class LastUsedRefVisitor extends RexVisitorImpl {
+  private static class LastUsedRefVisitor extends RexVisitorImpl<Void> {
 
     int lastUsedRef = -1;
 

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RewriteProjectToFlatten.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RewriteProjectToFlatten.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RewriteProjectToFlatten.java
index 21a95b8..a5457fe 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RewriteProjectToFlatten.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RewriteProjectToFlatten.java
@@ -17,14 +17,9 @@
  
******************************************************************************/
 package org.apache.drill.exec.planner.physical.visitor;
 
-import com.google.common.collect.Lists;
-import org.apache.calcite.tools.RelConversionException;
-import org.apache.drill.exec.planner.physical.FlattenPrel;
-import org.apache.drill.exec.planner.physical.Prel;
-import org.apache.drill.exec.planner.physical.ProjectPrel;
-import org.apache.drill.exec.planner.types.RelDataTypeDrillImpl;
-import org.apache.drill.exec.planner.types.RelDataTypeHolder;
-import org.apache.drill.exec.planner.sql.DrillOperatorTable;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.calcite.rel.RelNode;
 import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.rel.type.RelDataTypeField;
@@ -32,9 +27,15 @@ import org.apache.calcite.rel.type.RelRecordType;
 import org.apache.calcite.rex.RexBuilder;
 import org.apache.calcite.rex.RexCall;
 import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.tools.RelConversionException;
+import org.apache.drill.exec.planner.physical.FlattenPrel;
+import org.apache.drill.exec.planner.physical.Prel;
+import org.apache.drill.exec.planner.physical.ProjectPrel;
+import org.apache.drill.exec.planner.sql.DrillOperatorTable;
+import org.apache.drill.exec.planner.types.RelDataTypeDrillImpl;
+import org.apache.drill.exec.planner.types.RelDataTypeHolder;
 
-import java.util.ArrayList;
-import java.util.List;
+import com.google.common.collect.Lists;
 
 public class RewriteProjectToFlatten extends BasePrelVisitor<Prel, Object, 
RelConversionException> {
 
@@ -64,7 +65,7 @@ public class RewriteProjectToFlatten extends 
BasePrelVisitor<Prel, Object, RelCo
     List<RexNode> exprList = new ArrayList<>();
     boolean rewrite = false;
 
-    List<RelDataTypeField> relDataTypes = new ArrayList();
+    List<RelDataTypeField> relDataTypes = new ArrayList<>();
     int i = 0;
     RexNode flatttenExpr = null;
     for (RexNode rex : project.getChildExps()) {
@@ -72,7 +73,6 @@ public class RewriteProjectToFlatten extends 
BasePrelVisitor<Prel, Object, RelCo
       if (rex instanceof RexCall) {
         RexCall function = (RexCall) rex;
         String functionName = function.getOperator().getName();
-        int nArgs = function.getOperands().size();
 
         if (functionName.equalsIgnoreCase("flatten") ) {
           rewrite = true;

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RexVisitorComplexExprSplitter.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RexVisitorComplexExprSplitter.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RexVisitorComplexExprSplitter.java
index da82973..7d4a8e5 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RexVisitorComplexExprSplitter.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RexVisitorComplexExprSplitter.java
@@ -17,10 +17,9 @@
  
******************************************************************************/
 package org.apache.drill.exec.planner.physical.visitor;
 
-import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
-import org.apache.drill.exec.planner.physical.ProjectPrel;
-import org.apache.drill.exec.planner.types.RelDataTypeDrillImpl;
-import org.apache.drill.exec.planner.types.RelDataTypeHolder;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.rex.RexBuilder;
 import org.apache.calcite.rex.RexCall;
@@ -34,9 +33,10 @@ import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.rex.RexOver;
 import org.apache.calcite.rex.RexRangeRef;
 import org.apache.calcite.rex.RexVisitorImpl;
-
-import java.util.ArrayList;
-import java.util.List;
+import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
+import org.apache.drill.exec.planner.physical.ProjectPrel;
+import org.apache.drill.exec.planner.types.RelDataTypeDrillImpl;
+import org.apache.drill.exec.planner.types.RelDataTypeHolder;
 
 public class RexVisitorComplexExprSplitter extends RexVisitorImpl<RexNode> {
 
@@ -50,7 +50,7 @@ public class RexVisitorComplexExprSplitter extends 
RexVisitorImpl<RexNode> {
     super(true);
     this.factory = factory;
     this.funcReg = funcReg;
-    this.complexExprs = new ArrayList();
+    this.complexExprs = new ArrayList<>();
     this.lastUsedIndex = firstUnused;
   }
 
@@ -83,11 +83,12 @@ public class RexVisitorComplexExprSplitter extends 
RexVisitorImpl<RexNode> {
     return correlVariable;
   }
 
+  @Override
   public RexNode visitCall(RexCall call) {
 
     String functionName = call.getOperator().getName();
 
-    List<RexNode> newOps = new ArrayList();
+    List<RexNode> newOps = new ArrayList<>();
     for (RexNode operand : call.operands) {
       newOps.add(operand.accept(this));
     }

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java
index a3952cb..394cde3 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java
@@ -17,31 +17,30 @@
  
******************************************************************************/
 package org.apache.drill.exec.planner.physical.visitor;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
+import java.util.ArrayList;
+import java.util.List;
 
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.rel.type.RelDataTypeField;
+import org.apache.calcite.rel.type.RelDataTypeFieldImpl;
+import org.apache.calcite.rel.type.RelRecordType;
+import org.apache.calcite.rex.RexBuilder;
 import org.apache.calcite.rex.RexCall;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.sql.type.SqlTypeName;
 import org.apache.calcite.tools.RelConversionException;
-
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.planner.StarColumnHelper;
 import org.apache.drill.exec.planner.physical.Prel;
 import org.apache.drill.exec.planner.physical.PrelUtil;
 import org.apache.drill.exec.planner.physical.ProjectPrel;
+import org.apache.drill.exec.planner.sql.DrillOperatorTable;
 import org.apache.drill.exec.planner.types.RelDataTypeDrillImpl;
 import org.apache.drill.exec.planner.types.RelDataTypeHolder;
-import org.apache.drill.exec.planner.sql.DrillOperatorTable;
-import org.apache.calcite.rel.RelNode;
-import org.apache.calcite.rel.type.RelDataTypeFactory;
-import org.apache.calcite.rel.type.RelDataTypeField;
-import org.apache.calcite.rel.type.RelDataTypeFieldImpl;
-import org.apache.calcite.rel.type.RelRecordType;
-import org.apache.calcite.rex.RexBuilder;
-import org.apache.calcite.rex.RexNode;
-import org.apache.calcite.sql.type.SqlTypeName;
 
-import java.util.ArrayList;
-import java.util.List;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
 
 public class SplitUpComplexExpressions extends BasePrelVisitor<Prel, Object, 
RelConversionException> {
 
@@ -76,8 +75,8 @@ public class SplitUpComplexExpressions extends 
BasePrelVisitor<Prel, Object, Rel
 
     List<RexNode> exprList = new ArrayList<>();
 
-    List<RelDataTypeField> relDataTypes = new ArrayList();
-    List<RelDataTypeField> origRelDataTypes = new ArrayList();
+    List<RelDataTypeField> relDataTypes = new ArrayList<>();
+    List<RelDataTypeField> origRelDataTypes = new ArrayList<>();
     int i = 0;
     final int lastColumnReferenced = 
PrelUtil.getLastUsedColumnReference(project.getProjects());
 
@@ -101,7 +100,7 @@ public class SplitUpComplexExpressions extends 
BasePrelVisitor<Prel, Object, Rel
 
     ProjectPrel childProject;
 
-    List<RexNode> allExprs = new ArrayList();
+    List<RexNode> allExprs = new ArrayList<>();
     int exprIndex = 0;
     List<String> fieldNames = originalInput.getRowType().getFieldNames();
     for (int index = 0; index < lastRexInput; index++) {

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java
index 614ad2b..1dfc04d 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java
@@ -22,14 +22,24 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.calcite.config.Lex;
+import org.apache.calcite.plan.ConventionTraitDef;
+import org.apache.calcite.plan.RelOptCostFactory;
+import org.apache.calcite.plan.RelTraitDef;
+import org.apache.calcite.plan.hep.HepPlanner;
+import org.apache.calcite.plan.hep.HepProgramBuilder;
+import org.apache.calcite.rel.RelCollationTraitDef;
 import org.apache.calcite.rel.rules.ProjectToWindowRule;
+import org.apache.calcite.rel.rules.ReduceExpressionsRule;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.parser.SqlParseException;
+import org.apache.calcite.sql.parser.SqlParser;
+import org.apache.calcite.sql.parser.SqlParserPos;
 import org.apache.calcite.tools.FrameworkConfig;
 import org.apache.calcite.tools.Frameworks;
 import org.apache.calcite.tools.Planner;
 import org.apache.calcite.tools.RelConversionException;
 import org.apache.calcite.tools.RuleSet;
 import org.apache.calcite.tools.ValidationException;
-
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.exec.ops.QueryContext;
 import org.apache.drill.exec.physical.PhysicalPlan;
@@ -47,22 +57,10 @@ import 
org.apache.drill.exec.planner.sql.parser.DrillSqlCall;
 import org.apache.drill.exec.planner.sql.parser.SqlCreateTable;
 import 
org.apache.drill.exec.planner.sql.parser.impl.DrillParserWithCompoundIdConverter;
 import org.apache.drill.exec.planner.types.DrillRelDataTypeSystem;
-import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.testing.ControlsInjector;
 import org.apache.drill.exec.testing.ControlsInjectorFactory;
 import org.apache.drill.exec.util.Pointer;
 import org.apache.drill.exec.work.foreman.ForemanSetupException;
-import org.apache.calcite.rel.RelCollationTraitDef;
-import org.apache.calcite.rel.rules.ReduceExpressionsRule;
-import org.apache.calcite.plan.ConventionTraitDef;
-import org.apache.calcite.plan.RelOptCostFactory;
-import org.apache.calcite.plan.RelTraitDef;
-import org.apache.calcite.plan.hep.HepPlanner;
-import org.apache.calcite.plan.hep.HepProgramBuilder;
-import org.apache.calcite.sql.SqlNode;
-import org.apache.calcite.sql.parser.SqlParseException;
-import org.apache.calcite.sql.parser.SqlParser;
-import org.apache.calcite.sql.parser.SqlParserPos;
 import org.apache.drill.exec.work.foreman.SqlUnsupportedException;
 import org.apache.hadoop.security.AccessControlException;
 
@@ -80,7 +78,9 @@ public class DrillSqlWorker {
   private final QueryContext context;
 
   public DrillSqlWorker(QueryContext context) {
-    final List<RelTraitDef> traitDefs = new ArrayList<RelTraitDef>();
+    // Calcite is not fully generified
+    @SuppressWarnings("rawtypes")
+    final List<RelTraitDef> traitDefs = new ArrayList<>();
 
     traitDefs.add(ConventionTraitDef.INSTANCE);
     traitDefs.add(DrillDistributionTraitDef.INSTANCE);

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java
index e23b353..1e1c18c 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java
@@ -91,7 +91,7 @@ public class DrillFixedRelDataTypeImpl extends 
RelDataTypeImpl {
 
   @Override
   public RelDataTypePrecedenceList getPrecedenceList() {
-    return new SqlTypeExplicitPrecedenceList((List) Collections.emptyList());
+    return new 
SqlTypeExplicitPrecedenceList(Collections.<SqlTypeName>emptyList());
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java
index 25ce593..04c35c1 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java
@@ -73,7 +73,7 @@ public class RelDataTypeDrillImpl extends RelDataTypeImpl {
 
     @Override
     public RelDataTypePrecedenceList getPrecedenceList() {
-      return new SqlTypeExplicitPrecedenceList((List<SqlTypeName>) (List) 
Collections.emptyList());
+      return new 
SqlTypeExplicitPrecedenceList(Collections.<SqlTypeName>emptyList());
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java
index 45cbe66..377c7af 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java
@@ -19,8 +19,6 @@ package org.apache.drill.exec.record;
 
 import org.apache.drill.exec.vector.ValueVector;
 
-import java.util.LinkedList;
-
 public class ExpandableHyperContainer extends VectorContainer {
 
   public ExpandableHyperContainer() {
@@ -30,12 +28,12 @@ public class ExpandableHyperContainer extends 
VectorContainer {
   public ExpandableHyperContainer(VectorAccessible batch) {
     super();
     if (batch.getSchema().getSelectionVectorMode() == 
BatchSchema.SelectionVectorMode.FOUR_BYTE) {
-      for (VectorWrapper w : batch) {
+      for (VectorWrapper<?> w : batch) {
         ValueVector[] hyperVector = w.getValueVectors();
         this.add(hyperVector, true);
       }
     } else {
-      for (VectorWrapper w : batch) {
+      for (VectorWrapper<?> w : batch) {
         ValueVector[] hyperVector = { w.getValueVector() };
         this.add(hyperVector, true);
       }
@@ -45,12 +43,12 @@ public class ExpandableHyperContainer extends 
VectorContainer {
   public void addBatch(VectorAccessible batch) {
     if (wrappers.size() == 0) {
       if (batch.getSchema().getSelectionVectorMode() == 
BatchSchema.SelectionVectorMode.FOUR_BYTE) {
-        for (VectorWrapper w : batch) {
+        for (VectorWrapper<?> w : batch) {
           ValueVector[] hyperVector = w.getValueVectors();
           this.add(hyperVector, true);
         }
       } else {
-        for (VectorWrapper w : batch) {
+        for (VectorWrapper<?> w : batch) {
           ValueVector[] hyperVector = { w.getValueVector() };
           this.add(hyperVector, true);
         }
@@ -59,14 +57,14 @@ public class ExpandableHyperContainer extends 
VectorContainer {
     }
     if (batch.getSchema().getSelectionVectorMode() == 
BatchSchema.SelectionVectorMode.FOUR_BYTE) {
       int i = 0;
-      for (VectorWrapper w : batch) {
-        HyperVectorWrapper hyperVectorWrapper = (HyperVectorWrapper) 
wrappers.get(i++);
+      for (VectorWrapper<?> w : batch) {
+        HyperVectorWrapper<?> hyperVectorWrapper = (HyperVectorWrapper<?>) 
wrappers.get(i++);
         hyperVectorWrapper.addVectors(w.getValueVectors());
       }
     } else {
       int i = 0;
-      for (VectorWrapper w : batch) {
-        HyperVectorWrapper hyperVectorWrapper = (HyperVectorWrapper) 
wrappers.get(i++);
+      for (VectorWrapper<?> w : batch) {
+        HyperVectorWrapper<?> hyperVectorWrapper = (HyperVectorWrapper<?>) 
wrappers.get(i++);
         hyperVectorWrapper.addVector(w.getValueVector());
       }
     }

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java
index 322339e..44c6b1a 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java
@@ -17,16 +17,12 @@
  */
 package org.apache.drill.exec.record;
 
-import java.util.AbstractMap;
-
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.vector.ValueVector;
-import org.apache.drill.exec.vector.complex.AbstractContainerVector;
 import org.apache.drill.exec.vector.complex.AbstractMapVector;
 import org.apache.drill.exec.vector.complex.FieldIdUtil;
-import org.apache.drill.exec.vector.complex.MapVector;
 
 import com.google.common.base.Preconditions;
 
@@ -147,12 +143,13 @@ public class HyperVectorWrapper<T extends ValueVector> 
implements VectorWrapper<
    * Both this and destination must be of same type and have same number of 
vectors.
    * @param destination destination HyperVectorWrapper.
    */
+  @Override
   public void transfer(VectorWrapper<?> destination) {
     Preconditions.checkArgument(destination instanceof HyperVectorWrapper);
     
Preconditions.checkArgument(getField().getType().equals(destination.getField().getType()));
-    Preconditions.checkArgument(vectors.length == 
((HyperVectorWrapper)destination).vectors.length);
+    Preconditions.checkArgument(vectors.length == 
((HyperVectorWrapper<?>)destination).vectors.length);
 
-    ValueVector[] destionationVectors = 
((HyperVectorWrapper)destination).vectors;
+    ValueVector[] destionationVectors = 
((HyperVectorWrapper<?>)destination).vectors;
     for (int i = 0; i < vectors.length; ++i) {
       vectors[i].makeTransferPair(destionationVectors[i]).transfer();
     }

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java
index 1e8a52f..49562af 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java
@@ -17,25 +17,12 @@
  */
 package org.apache.drill.exec.record;
 
-import com.google.common.collect.Lists;
-import org.apache.drill.common.expression.PathSegment;
 import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.types.TypeProtos;
-import org.apache.drill.common.types.TypeProtos.DataMode;
-import org.apache.drill.common.types.TypeProtos.MajorType;
-import org.apache.drill.common.types.TypeProtos.MajorTypeOrBuilder;
-import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.vector.ValueVector;
-import org.apache.drill.exec.vector.complex.AbstractContainerVector;
 import org.apache.drill.exec.vector.complex.AbstractMapVector;
 import org.apache.drill.exec.vector.complex.FieldIdUtil;
-import org.apache.drill.exec.vector.complex.ListVector;
-import org.apache.drill.exec.vector.complex.MapVector;
-import org.apache.drill.exec.vector.complex.UnionVector;
 
-import java.util.List;
 import com.google.common.base.Preconditions;
 
 public class SimpleVectorWrapper<T extends ValueVector> implements 
VectorWrapper<T>{
@@ -114,10 +101,11 @@ public class SimpleVectorWrapper<T extends ValueVector> 
implements VectorWrapper
     return FieldIdUtil.getFieldId(getValueVector(), id, expectedPath, false);
   }
 
+  @Override
   public void transfer(VectorWrapper<?> destination) {
     Preconditions.checkArgument(destination instanceof SimpleVectorWrapper);
     
Preconditions.checkArgument(getField().getType().equals(destination.getField().getType()));
-    
vector.makeTransferPair(((SimpleVectorWrapper)destination).vector).transfer();
+    
vector.makeTransferPair(((SimpleVectorWrapper<?>)destination).vector).transfer();
   }
 
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DataRecord.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DataRecord.java 
b/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DataRecord.java
index 45f0683..3a46e8e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DataRecord.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DataRecord.java
@@ -41,7 +41,7 @@ public class DataRecord {
             }
         } else {
             if(isList) {
-                ((List)dataMap.get(fieldId)).add(data);
+                ((List<Object>)dataMap.get(fieldId)).add(data);
             } else {
                 throw new IllegalStateException("Overriding field id existing 
data!");
             }

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java
index d3e6107..05eed49 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java
@@ -42,8 +42,6 @@ import org.glassfish.jersey.server.mvc.Viewable;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 
-import static 
org.apache.drill.exec.server.rest.auth.DrillUserPrincipal.ADMIN_ROLE;
-
 @Path("/")
 @PermitAll
 public class StatusResources {
@@ -63,7 +61,7 @@ public class StatusResources {
   @Path("/options.json")
   @RolesAllowed(DrillUserPrincipal.AUTHENTICATED_ROLE)
   @Produces(MediaType.APPLICATION_JSON)
-  public List getSystemOptionsJSON() {
+  public List<OptionWrapper> getSystemOptionsJSON() {
     List<OptionWrapper> options = new LinkedList<>();
     for (OptionValue option : work.getContext().getOptionManager()) {
       options.add(new OptionWrapper(option.name, option.getValue(), 
option.type, option.kind));

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java
index 84a584f..744f982 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java
@@ -17,16 +17,14 @@
  */
 package org.apache.drill.exec.store.avro;
 
-import io.netty.buffer.DrillBuf;
-
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
+import java.security.PrivilegedExceptionAction;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map.Entry;
 import java.util.concurrent.TimeUnit;
-import java.security.PrivilegedExceptionAction;
 
 import org.apache.avro.Schema;
 import org.apache.avro.Schema.Type;
@@ -52,11 +50,12 @@ import 
org.apache.drill.exec.vector.complex.impl.MapOrListWriterImpl;
 import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
 
 import com.google.common.base.Charsets;
 import com.google.common.base.Stopwatch;
 
-import org.apache.hadoop.security.UserGroupInformation;
+import io.netty.buffer.DrillBuf;
 
 /**
  * A RecordReader implementation for Avro data files.
@@ -75,7 +74,6 @@ public class AvroRecordReader extends AbstractRecordReader {
   private VectorContainerWriter writer;
 
   private DataFileReader<GenericContainer> reader = null;
-  private OperatorContext operatorContext;
   private FileSystem fs;
 
   private final String opUserName;
@@ -102,12 +100,12 @@ public class AvroRecordReader extends 
AbstractRecordReader {
     this.fieldSelection = FieldSelection.getFieldSelection(projectedColumns);
   }
 
-  private DataFileReader getReader(final Path hadoop, final FileSystem fs) 
throws ExecutionSetupException {
+  private DataFileReader<GenericContainer> getReader(final Path hadoop, final 
FileSystem fs) throws ExecutionSetupException {
     try {
       final UserGroupInformation ugi = 
ImpersonationUtil.createProxyUgi(this.opUserName, this.queryUserName);
-      return ugi.doAs(new PrivilegedExceptionAction<DataFileReader>() {
+      return ugi.doAs(new 
PrivilegedExceptionAction<DataFileReader<GenericContainer>>() {
         @Override
-        public DataFileReader run() throws Exception {
+        public DataFileReader<GenericContainer> run() throws Exception {
           return new DataFileReader<>(new FsInput(hadoop, fs.getConf()), new 
GenericDatumReader<GenericContainer>());
         }
       });
@@ -119,7 +117,6 @@ public class AvroRecordReader extends AbstractRecordReader {
 
   @Override
   public void setup(final OperatorContext context, final OutputMutator output) 
throws ExecutionSetupException {
-    operatorContext = context;
     writer = new VectorContainerWriter(output);
 
     try {
@@ -202,7 +199,7 @@ public class AvroRecordReader extends AbstractRecordReader {
         break;
       case ARRAY:
         assert fieldName != null;
-        final GenericArray array = (GenericArray) value;
+        final GenericArray<?> array = (GenericArray<?>) value;
         Schema elementSchema = array.getSchema().getElementType();
         Type elementType = elementSchema.getType();
         if (elementType == Schema.Type.RECORD || elementType == 
Schema.Type.MAP){

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
index 2f69155..ed3decb 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
@@ -75,7 +75,7 @@ public class MockRecordReader extends AbstractRecordReader {
       for (int i = 0; i < config.getTypes().length; i++) {
         final MajorType type = config.getTypes()[i].getMajorType();
         final MaterializedField field = 
getVector(config.getTypes()[i].getName(), type, batchRecordCount);
-        final Class vvClass = 
TypeHelper.getValueVectorClass(field.getType().getMinorType(), 
field.getDataMode());
+        final Class<? extends ValueVector> vvClass = 
TypeHelper.getValueVectorClass(field.getType().getMinorType(), 
field.getDataMode());
         valueVectors[i] = output.addField(field, vvClass);
       }
     } catch (SchemaChangeException e) {

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java
index 590c612..25b93b7 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java
@@ -326,7 +326,7 @@ public class Metadata {
 
         boolean statsAvailable = (col.getStatistics() != null && 
!col.getStatistics().isEmpty());
 
-        Statistics stats = col.getStatistics();
+        Statistics<?> stats = col.getStatistics();
         String[] columnName = col.getPath().toArray();
         SchemaPath columnSchemaName = SchemaPath.getCompoundPath(columnName);
         ColumnTypeMetadata_v2 columnTypeMetadata =
@@ -1012,6 +1012,7 @@ public class Metadata {
       return nulls;
     }
 
+    @Override
     public boolean hasSingleValue() {
       return (mxValue != null);
     }

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
index 6cccc8e..b8ae92c 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java
@@ -26,12 +26,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-
-import org.apache.calcite.util.Pair;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.logical.FormatPluginConfig;
@@ -87,21 +81,24 @@ import org.apache.drill.exec.vector.NullableVarCharVector;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
-import org.joda.time.DateTimeUtils;
 import org.apache.parquet.io.api.Binary;
+import org.apache.parquet.schema.OriginalType;
+import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
+import org.joda.time.DateTimeUtils;
 
 import com.codahale.metrics.MetricRegistry;
 import com.fasterxml.jackson.annotation.JacksonInject;
+import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Stopwatch;
+import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
-
-import org.apache.parquet.schema.OriginalType;
-import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 
 @JsonTypeName("parquet-scan")
 public class ParquetGroupScan extends AbstractFileGroupScan {
@@ -216,10 +213,10 @@ public class ParquetGroupScan extends 
AbstractFileGroupScan {
     this.rowCount = that.rowCount;
     this.rowGroupInfos = that.rowGroupInfos == null ? null : 
Lists.newArrayList(that.rowGroupInfos);
     this.selectionRoot = that.selectionRoot;
-    this.columnValueCounts = that.columnValueCounts == null ? null : new 
HashMap(that.columnValueCounts);
-    this.columnTypeMap = that.columnTypeMap == null ? null : new 
HashMap(that.columnTypeMap);
-    this.partitionValueMap = that.partitionValueMap == null ? null : new 
HashMap(that.partitionValueMap);
-    this.fileSet = that.fileSet == null ? null : new HashSet(that.fileSet);
+    this.columnValueCounts = that.columnValueCounts == null ? null : new 
HashMap<>(that.columnValueCounts);
+    this.columnTypeMap = that.columnTypeMap == null ? null : new 
HashMap<>(that.columnTypeMap);
+    this.partitionValueMap = that.partitionValueMap == null ? null : new 
HashMap<>(that.partitionValueMap);
+    this.fileSet = that.fileSet == null ? null : new HashSet<>(that.fileSet);
     this.usedMetadataCache = that.usedMetadataCache;
     this.parquetTableMetadata = that.parquetTableMetadata;
   }
@@ -907,6 +904,6 @@ public class ParquetGroupScan extends AbstractFileGroupScan 
{
 
   @Override
   public List<SchemaPath> getPartitionColumns() {
-    return new ArrayList(columnTypeMap.keySet());
+    return new ArrayList<>(columnTypeMap.keySet());
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/2ffe3117/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/BitReader.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/BitReader.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/BitReader.java
index b2a42dc..9b8a063 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/BitReader.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/BitReader.java
@@ -19,16 +19,14 @@ package org.apache.drill.exec.store.parquet.columnreaders;
 
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.exec.vector.BitVector;
-import org.apache.drill.exec.vector.ValueVector;
-
 import org.apache.parquet.column.ColumnDescriptor;
 import org.apache.parquet.format.SchemaElement;
 import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
 
-final class BitReader extends ColumnReader {
+final class BitReader extends ColumnReader<BitVector> {
 
   BitReader(ParquetRecordReader parentReader, int allocateSize, 
ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData,
-            boolean fixedLength, ValueVector v, SchemaElement schemaElement) 
throws ExecutionSetupException {
+            boolean fixedLength, BitVector v, SchemaElement schemaElement) 
throws ExecutionSetupException {
     super(parentReader, allocateSize, descriptor, columnChunkMetaData, 
fixedLength, v, schemaElement);
   }
 
@@ -53,7 +51,7 @@ final class BitReader extends ColumnReader {
     // benefit, for now this reader has been moved to use the higher level 
value
     // by value reader provided by the parquet library.
     for (int i = 0; i < recordsReadInThisIteration; i++){
-      ((BitVector)valueVec).getMutator().setSafe(i + valuesReadInCurrentPass,
+      valueVec.getMutator().setSafe(i + valuesReadInCurrentPass,
             pageReader.valueReader.readBoolean() ? 1 : 0 );
     }
   }

Reply via email to