DRILL-5116: Enable generated code debugging in each Drill operator

DRILL-5052 added the ability to debug generated code. The reviewer suggested
permitting the technique to be used for all Drill operators. This PR provides
the required fixes. Most were small changes; others dealt with the rather
clever way that the existing byte-code merge converted static nested classes
to non-static inner classes, with the way that constructors were inserted
at the byte-code level, and so on. See the JIRA for the details.

This code passed the unit tests twice: once with the traditional byte-code
manipulations, and a second time using "plain-old Java" code compilation.
Plain-old Java is turned off by default, but can be turned on for all
operators with a single config change; see the JIRA for details. Consider
the plain-old Java option experimental: very handy for debugging, but
perhaps not yet tested enough for production use.
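
For reference, the boot-time option added by this change (see
CodeCompiler.java in the diff below) is
drill.exec.compile.prefer_plain_java. Assuming the usual
drill-override.conf mechanism, enabling plain Java for all capable
operators would look roughly like:

    drill.exec.compile.prefer_plain_java: true

Treat this snippet as an illustration; the JIRA remains the authoritative
source for the exact setting.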

close apache/drill#716


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/ee399317
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/ee399317
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/ee399317

Branch: refs/heads/master
Commit: ee399317a1faa44e18aedcb11cfa5d4d5c0941aa
Parents: 4d4e0c2
Author: Paul Rogers <prog...@maprtech.com>
Authored: Mon Dec 12 17:30:56 2016 -0800
Committer: Jinfeng Ni <j...@apache.org>
Committed: Tue Jan 10 16:40:32 2017 -0800

----------------------------------------------------------------------
 .../apache/drill/exec/compile/ClassBuilder.java |  80 ++++++----
 .../exec/compile/ClassCompilerSelector.java     |  33 ++--
 .../drill/exec/compile/ClassTransformer.java    |  10 +-
 .../apache/drill/exec/compile/CodeCompiler.java | 150 ++++++++++++++++---
 .../apache/drill/exec/compile/MergeAdapter.java |  16 +-
 .../drill/exec/compile/QueryClassLoader.java    |  17 ++-
 .../drill/exec/compile/sig/SignatureHolder.java |  10 +-
 .../apache/drill/exec/expr/ClassGenerator.java  | 142 +++++++++++++++++-
 .../apache/drill/exec/expr/CodeGenerator.java   |  91 +++++++----
 .../physical/config/OrderedPartitionSender.java |   2 +-
 .../impl/TopN/PriorityQueueTemplate.java        |  31 +++-
 .../exec/physical/impl/TopN/TopNBatch.java      |  20 ++-
 .../physical/impl/aggregate/HashAggBatch.java   |  17 ++-
 .../impl/aggregate/HashAggTemplate.java         |  63 ++++----
 .../impl/aggregate/StreamingAggBatch.java       |  14 +-
 .../impl/aggregate/StreamingAggTemplate.java    |   5 +-
 .../physical/impl/common/ChainedHashTable.java  |   8 +-
 .../physical/impl/common/HashTableTemplate.java |  31 ++--
 .../physical/impl/filter/FilterRecordBatch.java |  14 +-
 .../physical/impl/filter/FilterTemplate2.java   |  20 ++-
 .../exec/physical/impl/filter/Filterer.java     |   5 +-
 .../impl/flatten/FlattenRecordBatch.java        |   8 +-
 .../physical/impl/flatten/FlattenTemplate.java  |  18 ++-
 .../exec/physical/impl/join/HashJoinBatch.java  |   5 +-
 .../exec/physical/impl/join/MergeJoinBatch.java |   5 +-
 .../physical/impl/join/NestedLoopJoinBatch.java |   5 +-
 .../MergingReceiverGeneratorBase.java           |   6 +-
 .../mergereceiver/MergingReceiverTemplate.java  |  12 +-
 .../impl/mergereceiver/MergingRecordBatch.java  |  26 +++-
 .../OrderedPartitionProjectorTemplate.java      |  38 ++---
 .../OrderedPartitionRecordBatch.java            |  26 +++-
 .../OrderedPartitionSenderCreator.java          |   1 +
 .../PartitionSenderRootExec.java                |   5 +-
 .../partitionsender/PartitionerTemplate.java    |  55 +++++--
 .../impl/project/ProjectRecordBatch.java        |  11 +-
 .../impl/project/ProjectorTemplate.java         |  24 ++-
 .../exec/physical/impl/sort/SortBatch.java      |   8 +-
 .../exec/physical/impl/svremover/Copier.java    |   8 +-
 .../impl/svremover/CopierTemplate2.java         |  12 +-
 .../impl/svremover/CopierTemplate4.java         |  16 +-
 .../impl/svremover/RemovingRecordBatch.java     |  33 ++--
 .../impl/union/UnionAllRecordBatch.java         |   9 +-
 .../physical/impl/union/UnionAllerTemplate.java |  26 ++--
 .../impl/window/NoFrameSupportTemplate.java     |  97 ++++++++----
 .../impl/window/WindowFrameRecordBatch.java     |  25 +++-
 .../exec/physical/impl/window/WindowFramer.java |  16 +-
 .../physical/impl/xsort/ExternalSortBatch.java  |  22 ++-
 .../exec/physical/impl/xsort/MSortTemplate.java |  17 ++-
 .../physical/impl/xsort/SingleBatchSorter.java  |   4 +-
 .../impl/xsort/SingleBatchSorterTemplate.java   |  12 +-
 .../drill/exec/server/DrillbitContext.java      |   3 +-
 .../src/main/resources/drill-module.conf        |   6 +-
 .../java/org/apache/drill/TestUnionAll.java     |   4 +-
 .../org/apache/drill/TestUnionDistinct.java     |   4 +-
 .../apache/drill/exec/compile/ExampleInner.java |   8 +-
 .../exec/compile/ExampleTemplateWithInner.java  |  80 ++++++++--
 .../exec/compile/TestClassTransformation.java   |  27 ++--
 .../physical/impl/TestConvertFunctions.java     |  35 ++++-
 .../exec/physical/impl/agg/TestHashAggr.java    |   2 +-
 .../exec/physical/impl/flatten/TestFlatten.java |   8 +-
 .../partitionsender/TestPartitionSender.java    |  10 +-
 .../physical/impl/union/TestSimpleUnion.java    |   4 +-
 .../physical/unit/PhysicalOpUnitTestBase.java   |  30 ++--
 63 files changed, 1135 insertions(+), 415 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassBuilder.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassBuilder.java 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassBuilder.java
index f5024fe..ec039ae 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassBuilder.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassBuilder.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -23,14 +23,18 @@ import java.io.IOException;
 import java.util.Map;
 
 import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.util.DrillStringUtils;
 import org.apache.drill.exec.compile.ClassTransformer.ClassNames;
 import org.apache.drill.exec.exception.ClassTransformationException;
 import org.apache.drill.exec.expr.CodeGenerator;
 import org.apache.drill.exec.server.options.OptionManager;
 import org.codehaus.commons.compiler.CompileException;
+import org.objectweb.asm.tree.ClassNode;
+
+import com.google.common.collect.Maps;
 
 /**
- * Implements the "plain-old Java" method of code generation and
+ * Implements the "plain Java" method of code generation and
  * compilation. Given a {@link CodeGenerator}, obtains the generated
  * source code, compiles it with the selected compiler, loads the
  * byte-codes into a class loader and provides the resulting
@@ -41,20 +45,23 @@ import org.codehaus.commons.compiler.CompileException;
  * so that the JVM can use normal Java inheritance to associate the
  * template and generated methods.
  * <p>
- * Here is how to use the plain-old Java technique to debug
+ * Here is how to use the plain Java technique to debug
  * generated code:
  * <ul>
- * <li>Set the config option <var>drill.exec.compile.save_source</var>
- * to <var>true</var>.</li>
- * <li>Set the config option <var>drill.exec.compile.code_dir</var>
+ * <li>Set the config option <tt>drill.exec.compile.code_dir</tt>
  * to the location where you want to save the generated source
  * code.</li>
  * <li>Where you generate code (using a {@link CodeGenerator}),
- * set the "plain-old Java" options:<pre>
+ * set the "plain Java" options:<pre>
  * CodeGenerator&lt;Foo> cg = ...
- * cg.plainOldJavaCapable(true); // Class supports plain-old Java
- * cg.preferPlainOldJava(true); // Actually generate plain-old Java
- * ...</pre></li>
+ * cg.plainJavaCapable(true); // Class supports plain Java
+ * cg.preferPlainJava(true); // Actually generate plain Java
+ * cg.saveCodeForDebugging(true); // Save code for debugging
+ * ...</pre>
+ * Note that <tt>saveCodeForDebugging</tt> automatically sets the plain
+ * Java option if the generator is capable. Call <tt>preferPlainJava</tt>
+ * only if you want to try plain Java for this particular generated class
+ * without saving the generated code.</li>
  * <li>In your favorite IDE, add to the code lookup path the
  * code directory saved earlier. In Eclipse, for example, you do
  * this in the debug configuration you will use to debug Drill.</li>
@@ -64,46 +71,44 @@ import org.codehaus.commons.compiler.CompileException;
  * local variables. Have fun!</li>
  * </ul>
  * <p>
- * Note: not all generated code is ready to be compiled as plain-old
- * Java. Some classes omit from the template the proper <code>throws</code>
- * declarations. Other minor problems may also crop up. All are easy
- * to fix. Once you've done so, add the following to mark that you've
- * done the clean-up:<pre>
- * cg.plainOldJavaCapable(true); // Class supports plain-old Java</pre>
+ * Most generated classes have been upgraded to support Plain Java
+ * compilation. Once this work is complete, the calls to
+ * <tt>plainJavaCapable</tt> can be removed as all generated classes
+ * will be capable.
  * <p>
- * The setting to prefer plain-old Java is ignored for generated
- * classes not marked as plain-old Java capable.
+ * The setting to prefer plain Java is ignored for any remaining generated
+ * classes not marked as plain Java capable.
  */
 
 public class ClassBuilder {
 
-  public static final String SAVE_CODE_OPTION = CodeCompiler.COMPILE_BASE + 
".save_source";
+  private static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(ClassBuilder.class);
   public static final String CODE_DIR_OPTION = CodeCompiler.COMPILE_BASE + 
".code_dir";
 
   private final DrillConfig config;
   private final OptionManager options;
-  private final boolean saveCode;
   private final File codeDir;
 
   public ClassBuilder(DrillConfig config, OptionManager optionManager) {
     this.config = config;
     options = optionManager;
 
-    // The option to save code is a boot-time option because
-    // it is used selectively during debugging, but can cause
-    // excessive I/O in a running server if used to save all code.
+    // Code can be saved per-class to enable debugging.
+    // Just mark the code generator's code to be persisted,
+    // point your debugger to the directory set below, and you
+    // can step into the code for debugging. Code is not saved
+    // by default because doing so is expensive and unnecessary.
 
-    saveCode = config.getBoolean(SAVE_CODE_OPTION);
     codeDir = new File(config.getString(CODE_DIR_OPTION));
   }
 
   /**
-   * Given a code generator which has already generated plain-old Java
+   * Given a code generator which has already generated plain Java
    * code, compile the code, create a class loader, and return the
    * resulting Java class.
    *
-   * @param cg a plain-old Java capable code generator that has generated
-   * plain-old Java code
+   * @param cg a plain Java capable code generator that has generated
+   * plain Java code
    * @return the class that the code generator defines
    * @throws ClassTransformationException
    */
@@ -127,9 +132,11 @@ public class ClassBuilder {
    * @throws ClassTransformationException generic "something is wrong" error 
from
    * Drill class compilation code.
    */
+  @SuppressWarnings("resource")
   private Class<?> compileClass(CodeGenerator<?> cg) throws IOException, 
CompileException, ClassNotFoundException, ClassTransformationException {
+    final long t1 = System.nanoTime();
 
-    // Get the plain-old Java code.
+    // Get the plain Java code.
 
     String code = cg.getGeneratedCode();
 
@@ -141,7 +148,9 @@ public class ClassBuilder {
     // A key advantage of this method is that the code can be
     // saved and debugged, if needed.
 
-    saveCode(code, name);
+    if (cg.isCodeToBeSaved()) {
+      saveCode(code, name);
+    }
 
     // Compile the code and load it into a class loader.
 
@@ -150,6 +159,15 @@ public class ClassBuilder {
     Map<String,byte[]> results = compilerSelector.compile(name, code);
     classLoader.addClasses(results);
 
+    long totalBytecodeSize = 0;
+    for (byte[] clazz : results.values()) {
+      totalBytecodeSize += clazz.length;
+    }
+    logger.debug("Compiled {}: bytecode size = {}, time = {} ms.",
+                 cg.getClassName(),
+                  DrillStringUtils.readable(totalBytecodeSize),
+                  (System.nanoTime() - t1 + 500_000) / 1_000_000);
+
     // Get the class from the class loader.
 
     try {
@@ -173,10 +191,6 @@ public class ClassBuilder {
 
   private void saveCode(String code, ClassNames name) {
 
-    // Skip if we don't want to save the code.
-
-    if (! saveCode) { return; }
-
     String pathName = name.slash + ".java";
     File codeFile = new File(codeDir, pathName);
     codeFile.getParentFile().mkdirs();
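
To make the debugging recipe in the ClassBuilder javadoc above concrete,
an operator's generated-class setup looks roughly like the sketch below
(modeled on the TopNBatch change later in this diff; the exact creation
call and exception handling vary by operator, and the commented-out line
is the dev-only switch):

    // Sketch only; "context" is the operator's FragmentContext.
    CodeGenerator<PriorityQueue> cg = CodeGenerator.get(
        PriorityQueue.TEMPLATE_DEFINITION,
        context.getFunctionRegistry(), context.getOptions());
    cg.plainJavaCapable(true);        // generated code can compile as plain Java
    // cg.saveCodeForDebugging(true); // dev-only: saves the source under
                                      // drill.exec.compile.code_dir and
                                      // implies plain Java generation
    // ... populate cg.getRoot() with the operator's expressions, then:
    PriorityQueue queue = context.getImplementationClass(cg);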

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassCompilerSelector.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassCompilerSelector.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassCompilerSelector.java
index c8afbc6..86c9a9b 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassCompilerSelector.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassCompilerSelector.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -63,6 +63,8 @@ import org.codehaus.commons.compiler.CompileException;
  */
 
 public class ClassCompilerSelector {
+  private static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(ClassCompilerSelector.class);
+
   public enum CompilerPolicy {
     DEFAULT, JDK, JANINO;
   }
@@ -101,16 +103,18 @@ public class ClassCompilerSelector {
 
   public ClassCompilerSelector(ClassLoader classLoader, DrillConfig config, 
OptionManager sessionOptions) {
     OptionValue value = sessionOptions.getOption(JAVA_COMPILER_OPTION);
-    this.policy = CompilerPolicy.valueOf((value != null) ? 
value.string_val.toUpperCase() : 
config.getString(JAVA_COMPILER_CONFIG).toUpperCase());
+    policy = CompilerPolicy.valueOf((value != null) ? 
value.string_val.toUpperCase() : 
config.getString(JAVA_COMPILER_CONFIG).toUpperCase());
 
     value = sessionOptions.getOption(JAVA_COMPILER_JANINO_MAXSIZE_OPTION);
-    this.janinoThreshold = (value != null) ? value.num_val : 
config.getLong(JAVA_COMPILER_JANINO_MAXSIZE_CONFIG);
+    janinoThreshold = (value != null) ? value.num_val : 
config.getLong(JAVA_COMPILER_JANINO_MAXSIZE_CONFIG);
 
     value = sessionOptions.getOption(JAVA_COMPILER_DEBUG_OPTION);
     boolean debug = (value != null) ? value.bool_val : 
config.getBoolean(JAVA_COMPILER_DEBUG_CONFIG);
 
-    this.janinoClassCompiler = (policy == CompilerPolicy.JANINO || policy == 
CompilerPolicy.DEFAULT) ? new JaninoClassCompiler(classLoader, debug) : null;
-    this.jdkClassCompiler = (policy == CompilerPolicy.JDK || policy == 
CompilerPolicy.DEFAULT) ? JDKClassCompiler.newInstance(classLoader, debug) : 
null;
+    janinoClassCompiler = (policy == CompilerPolicy.JANINO || policy == 
CompilerPolicy.DEFAULT) ? new JaninoClassCompiler(classLoader, debug) : null;
+    jdkClassCompiler = (policy == CompilerPolicy.JDK || policy == 
CompilerPolicy.DEFAULT) ? JDKClassCompiler.newInstance(classLoader, debug) : 
null;
+
+    logger.info(String.format("Java compiler policy: %s, Debug option: %b", 
policy, debug));
   }
 
   byte[][] getClassByteCode(ClassNames className, String sourceCode)
@@ -119,13 +123,20 @@ public class ClassCompilerSelector {
     byte[][] bc = getCompiler(sourceCode).getClassByteCode(className, 
sourceCode);
 
     // Uncomment the following to save the generated byte codes.
-
-//    final String baseDir = System.getProperty("java.io.tmpdir") + 
File.separator + className;
-//    File classFile = new File(baseDir + className.clazz);
-//    classFile.getParentFile().mkdirs();
-//    try (BufferedOutputStream out = new BufferedOutputStream(new 
FileOutputStream(classFile))) {
-//      out.write(bc[0]);
+    // Use the JDK javap command to view the generated code.
+    // This is the code from the compiler before byte code manipulations.
+    // For a similar block to display byte codes after manipulation,
+    // see QueryClassLoader.
+
+//    final File baseDir = new File( new File( 
System.getProperty("java.io.tmpdir") ), "classes" );
+//    for ( int i = 0;  i < bc.length;  i++ ) {
+//      File classFile = new File( baseDir, className.slash + i + ".class" );
+//      classFile.getParentFile().mkdirs();
+//      try (BufferedOutputStream out = new BufferedOutputStream(new 
FileOutputStream(classFile))) {
+//        out.write(bc[i]);
+//      }
 //    }
+//    System.out.println( "Classes saved to: " + baseDir.getAbsolutePath() );
 
     return bc;
   }

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassTransformer.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassTransformer.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassTransformer.java
index 3c3c30e..f348e95 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassTransformer.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/ClassTransformer.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -44,7 +44,7 @@ import com.google.common.collect.Sets;
  * Compiles generated code, merges the resulting class with the
  * template class, and performs byte-code cleanup on the resulting
  * byte codes. The most important transform is scalar replacement
- * which replaces occurences of non-escaping objects with a
+ * which replaces occurrences of non-escaping objects with a
  * collection of member variables.
  */
 
@@ -221,6 +221,7 @@ public class ClassTransformer {
     }
   }
 
+  @SuppressWarnings("resource")
   public Class<?> getImplementationClass(CodeGenerator<?> cg) throws 
ClassTransformationException {
     final QueryClassLoader loader = new QueryClassLoader(config, 
optionManager);
     return getImplementationClass(loader, cg.getDefinition(),
@@ -310,7 +311,10 @@ public class ClassTransformer {
 
       Class<?> c = classLoader.findClass(set.generated.dot);
       if (templateDefinition.getExternalInterface().isAssignableFrom(c)) {
-        logger.debug("Done compiling (bytecode size={}, time:{} millis).", 
DrillStringUtils.readable(totalBytecodeSize), (System.nanoTime() - t1) / 
1000000);
+        logger.debug("Compiled and merged {}: bytecode size = {}, time = {} 
ms.",
+             c.getSimpleName(),
+             DrillStringUtils.readable(totalBytecodeSize),
+             (System.nanoTime() - t1 + 500_000) / 1_000_000);
         return c;
       }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/compile/CodeCompiler.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/CodeCompiler.java 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/CodeCompiler.java
index fb59a4c..75ed720 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/CodeCompiler.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/CodeCompiler.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -17,9 +17,7 @@
  */
 package org.apache.drill.exec.compile;
 
-import java.io.IOException;
 import java.util.List;
-import java.util.concurrent.ExecutionException;
 
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.exec.exception.ClassTransformationException;
@@ -41,12 +39,89 @@ import com.google.common.collect.Lists;
  */
 
 public class CodeCompiler {
+  private static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(CodeCompiler.class);
+
+  /**
+   * Abstracts out the details of compiling code using the two available
+   * mechanisms. Allows this mechanism to be unit tested separately from
+   * the code cache.
+   */
+
+  public static class CodeGenCompiler {
+    private final ClassTransformer transformer;
+    private final ClassBuilder classBuilder;
+
+    public CodeGenCompiler(final DrillConfig config, final OptionManager 
optionManager) {
+      transformer = new ClassTransformer(config, optionManager);
+      classBuilder = new ClassBuilder(config, optionManager);
+    }
+
+    /**
+     * Compile the code already generated by the code generator.
+     *
+     * @param cg the code generator for the class
+     * @return the compiled class
+     * @throws Exception if anything goes wrong
+     */
+
+    public Class<?> compile(final CodeGenerator<?> cg) throws Exception {
+       if (cg.isPlainJava()) {
+
+        // Generate class as plain-old Java
+
+         logger.trace(String.format("Class %s generated as plain Java", 
cg.getClassName()));
+        return classBuilder.getImplementationClass(cg);
+      } else {
+
+        // Generate class parts and assemble byte-codes.
+
+        logger.trace(String.format("Class %s generated via byte-code 
manipulation", cg.getClassName()));
+        return transformer.getImplementationClass(cg);
+      }
+    }
+
+    /**
+     * Generate code for the code generator, then compile it.
+     *
+     * @param cg the code generator for the class
+     * @return the compiled class
+     * @throws Exception if anything goes wrong
+     */
+
+    public Class<?> generateAndCompile(final CodeGenerator<?> cg) throws 
Exception {
+      cg.generate();
+      return compile(cg);
+    }
+  }
 
   public static final String COMPILE_BASE = "drill.exec.compile";
+
+  /**
+   * Maximum size of the compiled class cache.
+   */
+
   public static final String MAX_LOADING_CACHE_SIZE_CONFIG = COMPILE_BASE + 
".cache_max_size";
 
-  private final ClassTransformer transformer;
-  private final ClassBuilder classBuilder;
+  /**
+   * Disables the code cache. Primarily for testing.
+   */
+
+  public static final String DISABLE_CACHE_CONFIG = COMPILE_BASE + 
".disable_cache";
+
+  /**
+   * Prefer to generate code as plain Java when the code generator
+   * supports that mechanism.
+   */
+
+  public static final String PREFER_POJ_CONFIG = CodeCompiler.COMPILE_BASE + 
".prefer_plain_java";
+
+  private final CodeGenCompiler codeGenCompiler;
+  private final boolean useCache;
+
+  // Metrics
+
+  private int classGenCount;
+  private int cacheMissCount;
 
   /**
    * Google Guava loading cache that defers creating a cache
@@ -57,14 +132,16 @@ public class CodeCompiler {
    */
 
   private final LoadingCache<CodeGenerator<?>, GeneratedClassEntry> cache;
+  private final boolean preferPlainJava;
 
   public CodeCompiler(final DrillConfig config, final OptionManager 
optionManager) {
-    transformer = new ClassTransformer(config, optionManager);
-    classBuilder = new ClassBuilder(config, optionManager);
-    final int cacheMaxSize = config.getInt(MAX_LOADING_CACHE_SIZE_CONFIG);
+    codeGenCompiler = new CodeGenCompiler(config, optionManager);
+    useCache = ! config.getBoolean(DISABLE_CACHE_CONFIG);
     cache = CacheBuilder.newBuilder()
-        .maximumSize(cacheMaxSize)
+        .maximumSize(config.getInt(MAX_LOADING_CACHE_SIZE_CONFIG))
         .build(new Loader());
+    preferPlainJava = config.getBoolean(PREFER_POJ_CONFIG);
+    logger.info(String.format("Plain java code generation preferred: %b", 
preferPlainJava));
   }
 
   /**
@@ -93,15 +170,25 @@ public class CodeCompiler {
 
   @SuppressWarnings("unchecked")
   public <T> List<T> createInstances(final CodeGenerator<?> cg, int count) 
throws ClassTransformationException {
+    if (preferPlainJava && cg.supportsPlainJava()) {
+      cg.preferPlainJava(true);
+    }
     cg.generate();
+    classGenCount++;
     try {
-      final GeneratedClassEntry ce = cache.get(cg);
+      final GeneratedClassEntry ce;
+      if (useCache) {
+        ce = cache.get(cg);
+        logger.trace(String.format("Class %s found in code cache", 
cg.getClassName()));
+      } else {
+        ce = makeClass(cg);
+      }
       List<T> tList = Lists.newArrayList();
-      for ( int i = 0; i < count; i++) {
+      for (int i = 0; i < count; i++) {
         tList.add((T) ce.clazz.newInstance());
       }
       return tList;
-    } catch (ExecutionException | InstantiationException | 
IllegalAccessException e) {
+    } catch (Exception e) {
       throw new ClassTransformationException(e);
     }
   }
@@ -117,18 +204,24 @@ public class CodeCompiler {
   private class Loader extends CacheLoader<CodeGenerator<?>, 
GeneratedClassEntry> {
     @Override
     public GeneratedClassEntry load(final CodeGenerator<?> cg) throws 
Exception {
-      final Class<?> c;
-      if ( cg.isPlainOldJava( ) ) {
-        // Generate class as plain old Java
+      return makeClass(cg);
+    }
+  }
 
-        c = classBuilder.getImplementationClass(cg);
-      } else {
-        // Generate class parts and assemble byte-codes.
+  /**
+   * Called when the requested class does not exist in the cache and should
+   * be compiled using the preferred code generation technique.
+   *
+   * @param cg the code generator for the class
+   * @return a cache entry for the class. The entry holds the class and the
+   * class holds onto its class loader (that is used to load any nested 
classes).
+   * @throws Exception if anything goes wrong with compilation or byte-code
+   * merge
+   */
 
-        c = transformer.getImplementationClass(cg);
-      }
-      return new GeneratedClassEntry(c);
-    }
+  private GeneratedClassEntry makeClass(final CodeGenerator<?> cg) throws 
Exception {
+    cacheMissCount++;
+    return new GeneratedClassEntry(codeGenCompiler.compile(cg));
   }
 
   private class GeneratedClassEntry {
@@ -153,4 +246,17 @@ public class CodeCompiler {
   public void flushCache() {
     cache.invalidateAll();
   }
+
+  /**
+   * Upon close, report the effectiveness of the code cache to the log.
+   */
+
+  public void close() {
+    int hitRate = 0;
+    if (classGenCount > 0) {
+      hitRate = (int) Math.round((classGenCount - cacheMissCount) * 100.0 / 
classGenCount);
+    }
+    logger.info(String.format("Stats: code gen count: %d, cache miss count: 
%d, hit rate: %d%%",
+                classGenCount, cacheMissCount, hitRate));
+  }
 }
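
Because the new CodeGenCompiler exists partly so that compilation can be
unit tested without the Guava cache in the way, a test can drive it
directly along the lines of the sketch below (the DrillConfig,
OptionManager and CodeGenerator are assumed to come from the existing test
fixtures; the method name is illustrative):

    // Sketch only: compile a generator's code while bypassing the code cache.
    static Class<?> compileWithoutCache(DrillConfig config,
                                        OptionManager options,
                                        CodeGenerator<?> cg) throws Exception {
      CodeCompiler.CodeGenCompiler compiler =
          new CodeCompiler.CodeGenCompiler(config, options);
      cg.plainJavaCapable(true);
      cg.preferPlainJava(true);   // force the plain Java path for this test
      return compiler.generateAndCompile(cg);
    }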

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java
index 05e8ac1..3a01dda 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java
@@ -141,7 +141,21 @@ class MergeAdapter extends ClassVisitor {
   public void visitEnd() {
     // add all the fields of the class we're going to merge.
     for (Iterator<?> it = classToMerge.fields.iterator(); it.hasNext();) {
-      ((FieldNode) it.next()).accept(this);
+
+      // Special handling for nested classes. Drill uses non-static nested
+      // "inner" classes in some templates. Prior versions of Drill would
+      // create the generated nested classes as static, then this line
+      // would copy the "this$0" field to convert the static nested class
+      // into a non-static inner class. However, that approach is not
+      // compatible with plain-old Java compilation. Now, Drill generates
+      // the nested classes as non-static inner classes. As a result, we
+      // do not want to copy the hidden fields; we'll end up with two if
+      // we do.
+
+      FieldNode field = (FieldNode) it.next();
+      if (! field.name.startsWith("this$")) {
+        field.accept(this);
+      }
     }
 
     // add all the methods that we to include.

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/compile/QueryClassLoader.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/QueryClassLoader.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/QueryClassLoader.java
index 31b464b..e71020c 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/QueryClassLoader.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/QueryClassLoader.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -58,6 +58,21 @@ public class QueryClassLoader extends URLClassLoader {
       throw new IOException(String.format("The class defined %s has already 
been loaded.", className));
     }
     customClasses.put(className, classBytes);
+
+    // Uncomment the following to save the generated byte codes.
+    // Use the JDK javap command to view the generated code.
+    // This is the code after byte code manipulations. See
+    // ClassCompilerSelector for a similar block to view the byte
+    // codes before manipulation.
+
+//    final File baseDir = new File( new File( 
System.getProperty("java.io.tmpdir") ), "classes" );
+//    String path = className.replace( '.', '/' );
+//    File classFile = new File( baseDir, path + ".class" );
+//    classFile.getParentFile().mkdirs();
+//    try (BufferedOutputStream out = new BufferedOutputStream(new 
FileOutputStream(classFile))) {
+//      out.write(classBytes);
+//    }
+//    System.out.println( "Classes saved to: " + baseDir.getAbsolutePath() );
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/SignatureHolder.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/SignatureHolder.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/SignatureHolder.java
index 541a85f..7363c50 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/SignatureHolder.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/sig/SignatureHolder.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -19,7 +19,6 @@ package org.apache.drill.exec.compile.sig;
 
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Iterator;
@@ -45,6 +44,13 @@ public class SignatureHolder implements 
Iterable<CodeGeneratorMethod> {
   public static SignatureHolder getHolder(Class<?> signature) {
     List<SignatureHolder> innerClasses = Lists.newArrayList();
     for (Class<?> inner : signature.getClasses()) {
+
+      // Do not generate classes for nested enums.
+      // (Occurs in HashAggTemplate.)
+
+      if (inner.isEnum()) {
+        continue;
+      }
       SignatureHolder h = getHolder(inner);
       if (h.childHolders.length > 0 || h.methods.length > 0) {
         innerClasses.add(h);

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java
index 96f14fb..0b6adaa 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -19,6 +19,7 @@ package org.apache.drill.exec.expr;
 
 import static org.apache.drill.exec.compile.sig.GeneratorMapping.GM;
 
+import java.lang.reflect.Constructor;
 import java.lang.reflect.Modifier;
 import java.util.LinkedList;
 import java.util.List;
@@ -42,6 +43,7 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.sun.codemodel.JBlock;
+import com.sun.codemodel.JCatchBlock;
 import com.sun.codemodel.JClass;
 import com.sun.codemodel.JClassAlreadyExistsException;
 import com.sun.codemodel.JCodeModel;
@@ -53,6 +55,7 @@ import com.sun.codemodel.JInvocation;
 import com.sun.codemodel.JLabel;
 import com.sun.codemodel.JMethod;
 import com.sun.codemodel.JMod;
+import com.sun.codemodel.JTryBlock;
 import com.sun.codemodel.JType;
 import com.sun.codemodel.JVar;
 import org.apache.drill.exec.server.options.OptionManager;
@@ -105,8 +108,19 @@ public class ClassGenerator<T>{
     rotateBlock();
 
     for (SignatureHolder child : signature.getChildHolders()) {
-      String innerClassName = child.getSignatureClass().getSimpleName();
-      JDefinedClass innerClazz = clazz._class(Modifier.FINAL + 
Modifier.PRIVATE, innerClassName);
+      Class<?> innerClass = child.getSignatureClass();
+      String innerClassName = innerClass.getSimpleName();
+
+      // Create the inner class as private final. If the template (super) class
+      // is static, then make the subclass static as well. Note the conversion
+      // from the JDK Modifier values to the JCodeModel JMod values: the
+      // values are different.
+
+      int mods = JMod.PRIVATE + JMod.FINAL;
+      if ((innerClass.getModifiers() & Modifier.STATIC) != 0) {
+        mods += JMod.STATIC;
+      }
+      JDefinedClass innerClazz = clazz._class(mods, innerClassName);
       innerClasses.put(innerClassName, new ClassGenerator<>(codeGenerator, 
mappingSet, child, eval, innerClazz, model, optionManager));
     }
   }
@@ -374,6 +388,128 @@ public class ClassGenerator<T>{
     return this.workspaceVectors;
   }
 
+  /**
+   * Prepare the generated class for use as a plain-old Java class
+   * (to be compiled by a compiler and directly loaded without a
+   * byte-code merge). Three additions are necessary:
+   * <ul>
+   * <li>The class must extend its template as we won't merge byte
+   * codes.</li>
+   * <li>A constructor is required to call the <tt>__DRILL_INIT__</tt>
+   * method. If this is a nested class, then the constructor must
+   * include parameters defined by the base class.</li>
+   * <li>For each nested class, create a method that creates an
+   * instance of that nested class using a well-defined name. This
+   * method overrides the base class method defined for this purpose.</li></ul>
+   */
+
+  public void preparePlainJava() {
+
+    // If this generated class uses the "plain Java" technique
+    // (no byte code manipulation), then the class must extend the
+    // template so it plays by normal Java rules for finding the
+    // template methods via inheritance rather than via code injection.
+
+    Class<?> baseClass = sig.getSignatureClass();
+    clazz._extends(baseClass);
+
+    // Create a constructor for the class: either a default one,
+    // or (for nested classes) one that passes along arguments to
+    // the super class constructor.
+
+    Constructor<?>[] ctors = baseClass.getConstructors();
+    for (Constructor<?> ctor : ctors) {
+      addCtor(ctor.getParameterTypes());
+    }
+
+    // Some classes have no declared constructor, but we need to generate one
+    // anyway.
+
+    if (ctors.length == 0) {
+      addCtor(new Class<?>[] {});
+    }
+
+    // Repeat for inner classes.
+
+    for (ClassGenerator<T> child : innerClasses.values()) {
+      child.preparePlainJava();
+
+      // If there are inner classes, then we need to generate a "shim" method
+      // to instantiate that class.
+      //
+      // protected TemplateClass.TemplateInnerClass newTemplateInnerClass( 
args... ) {
+      //    return new GeneratedClass.GeneratedInnerClass( args... );
+      // }
+      //
+      // The name is special: it is "new" + the inner class name. The template must
+      // provide a method of this name that creates the inner class instance.
+
+      String innerClassName = child.clazz.name();
+      JMethod shim = clazz.method(JMod.PROTECTED, 
child.sig.getSignatureClass(), "new" + innerClassName);
+      JInvocation childNew = JExpr._new(child.clazz);
+      Constructor<?>[] childCtors = 
child.sig.getSignatureClass().getConstructors();
+      Class<?>[] params;
+      if (childCtors.length==0) {
+        params = new Class<?>[0];
+      } else {
+        params = childCtors[0].getParameterTypes();
+      }
+      for (int i = 1; i < params.length; i++) {
+        Class<?> p = params[i];
+        childNew.arg(shim.param(model._ref(p), "arg" + i));
+      }
+      shim.body()._return(childNew);
+    }
+  }
+
+  /**
+   * The code generator creates a method called __DRILL_INIT__ which takes the
+   * place of the constructor when the code goes through the byte code merge.
+   * For Plain-old Java, we call the method from a constructor created for
+   * that purpose. (Generated code, fortunately, never includes a constructor,
+   * so we can create one.) Since the init block throws an exception (which
+   * should never occur), the generated constructor converts the checked
+   * exception into an unchecked one so as to not require changes to the
+   * various places that create instances of the generated classes.
+   *
+   * Example:<code><pre>
+   * public StreamingAggregatorGen1() {
+   *       try {
+   *         __DRILL_INIT__();
+   *     } catch (SchemaChangeException e) {
+   *         throw new UnsupportedOperationException(e);
+   *     }
+   * }</pre></code>
+   *
+   * Note: in Java 8 we'd use the <tt>Parameter</tt> class defined in Java's
+   * introspection package. But Drill prefers Java 7, which only provides
+   * parameter types.
+   */
+
+  private void addCtor(Class<?>[] parameters) {
+    JMethod ctor = clazz.constructor(JMod.PUBLIC);
+    JBlock body = ctor.body();
+
+    // If there are parameters, need to pass them to the super class.
+    if (parameters.length > 0) {
+      JInvocation superCall = JExpr.invoke("super");
+
+      // This case only occurs for nested classes, and all nested classes
+      // in Drill are inner classes. Don't pass along the (hidden)
+      // this$0 field.
+
+      for (int i = 1; i < parameters.length; i++) {
+        Class<?> p = parameters[i];
+        superCall.arg(ctor.param(model._ref(p), "arg" + i));
+      }
+      body.add(superCall);
+    }
+    JTryBlock tryBlock = body._try();
+    tryBlock.body().invoke(SignatureHolder.DRILL_INIT_METHOD);
+    JCatchBlock catchBlock = 
tryBlock._catch(model.ref(SchemaChangeException.class));
+    
catchBlock.body()._throw(JExpr._new(model.ref(UnsupportedOperationException.class)).arg(catchBlock.param("e")));
+  }
+
   private static class ValueVectorSetup {
     final DirectExpression batch;
     final TypedFieldId fieldId;
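
Putting the preparePlainJava() and addCtor() pieces above together, the
source produced for a plain Java generated class has roughly the shape
sketched below (class, method and argument names are illustrative; the
real names come from the template and the code generator, and
__DRILL_INIT__() is the generated method that holds the setup code the
byte-code merge used to inject into a constructor):

    public class ExampleGen1 extends ExampleTemplate {
      public ExampleGen1() {
        try {
          __DRILL_INIT__();
        } catch (SchemaChangeException e) {
          throw new UnsupportedOperationException(e);
        }
      }

      // Overrides the template's factory method so template code creates
      // the generated inner class rather than its own.
      protected ExampleTemplate.ExampleInner newExampleInner(RecordBatch arg1) {
        return new ExampleInner(arg1);
      }

      private final class ExampleInner extends ExampleTemplate.ExampleInner {
        public ExampleInner(RecordBatch arg1) {
          super(arg1);   // pass along the template constructor's arguments
          try {
            __DRILL_INIT__();
          } catch (SchemaChangeException e) {
            throw new UnsupportedOperationException(e);
          }
        }
      }
    }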

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java
index f50cfde..1b144b0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -19,15 +19,16 @@ package org.apache.drill.exec.expr;
 
 import java.io.IOException;
 
+import org.apache.drill.exec.compile.ClassBuilder;
 import org.apache.drill.exec.compile.TemplateClassDefinition;
 import org.apache.drill.exec.compile.sig.MappingSet;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
+import org.apache.drill.exec.server.options.OptionManager;
 
 import com.google.common.base.Preconditions;
 import com.sun.codemodel.JClassAlreadyExistsException;
 import com.sun.codemodel.JCodeModel;
 import com.sun.codemodel.JDefinedClass;
-import org.apache.drill.exec.server.options.OptionManager;
 
 /**
  * A code generator is responsible for generating the Java source code required
@@ -36,15 +37,15 @@ import org.apache.drill.exec.server.options.OptionManager;
  * outer and inner classes associated with a particular runtime generated 
instance.
  * <p>
  * Drill supports two ways to generate and compile the code from a code
- * generator: via byte-code manipulations or as "plain-old Java."
+ * generator: via byte-code manipulations or as "plain Java."
  * <p>
  * When using byte-code transformations, the code generator is used with a
  * class transformer to merge precompiled template code with runtime generated 
and
  * compiled query specific code to create a runtime instance.
  * <p>
- * The code generator can optionally be marked as "plain-old Java" capable.
+ * The code generator can optionally be marked as "plain Java" capable.
  * This means that the generated code can be compiled directly as a Java
- * class without the normal byte-code manipulations. Plain-old Java allows
+ * class without the normal byte-code manipulations. Plain Java allows
  * the option to persist, and debug, the generated code when building new
  * generated classes or otherwise working with generated code. To turn
  * on debugging, see the explanation in {@link ClassBuilder}.
@@ -67,18 +68,25 @@ public class CodeGenerator<T> {
 
   /**
    * True if the code generated for this class is suitable for compilation
-   * as a plain-old Java class.
+   * as a plain Java class.
    */
 
-  private boolean plainOldJavaCapable;
+  private boolean plainJavaCapable;
 
   /**
    * True if the code generated for this class should actually be compiled
-   * via the plain-old Java mechanism. Considered only if the class is
+   * via the plain Java mechanism. Considered only if the class is
    * capable of this technique.
    */
 
-  private boolean usePlainOldJava;
+  private boolean usePlainJava;
+
+  /**
+   * Whether to write code to disk to aid in debugging. Should only be set
+   * during development, never in production.
+   */
+
+  private boolean saveDebugCode;
   private String generatedCode;
   private String generifiedCode;
 
@@ -96,9 +104,6 @@ public class CodeGenerator<T> {
     try {
       this.model = new JCodeModel();
       JDefinedClass clazz = model._package(PACKAGE_NAME)._class(className);
-      if ( isPlainOldJava( ) ) {
-        clazz._extends(definition.getTemplateClass( ) );
-      }
       rootGenerator = new ClassGenerator<>(this, mappingSet, 
definition.getSignature(), new EvaluationVisitor(
           funcRegistry), clazz, model, optionManager);
     } catch (JClassAlreadyExistsException e) {
@@ -108,35 +113,60 @@ public class CodeGenerator<T> {
 
   /**
    * Indicates that the code for this class can be generated using the
-   * "Plain Old Java" mechanism based on inheritance. The byte-code
+   * "Plain Java" mechanism based on inheritance. The byte-code
    * method is more lenient, so some code is missing some features such
    * as proper exception labeling, etc. Set this option to true once
    * the generation mechanism for a class has been cleaned up to work
-   * via the plain-old Java mechanism.
+   * via the plain Java mechanism.
    *
    * @param flag true if the code generated from this instance is
-   * ready to be compiled as a plain-old Java class
+   * ready to be compiled as a plain Java class
    */
 
-  public void plainOldJavaCapable(boolean flag) {
-    plainOldJavaCapable = flag;
+  public void plainJavaCapable(boolean flag) {
+    plainJavaCapable = flag;
   }
 
   /**
    * Identifies that this generated class should be generated via the
-   * plain-old Java mechanism. This flag only has meaning if the
-   * generated class is capable of plain-old Java generation.
+   * plain Java mechanism. This flag only has meaning if the
+   * generated class is capable of plain Java generation.
    *
    * @param flag true if the class should be generated and compiled
-   * as a plain-old Java class (rather than via byte-code manipulations)
+   * as a plain Java class (rather than via byte-code manipulations)
    */
 
-  public void preferPlainOldJava(boolean flag) {
-    usePlainOldJava = flag;
+  public void preferPlainJava(boolean flag) {
+    usePlainJava = flag;
+  }
+
+  public boolean supportsPlainJava() {
+    return plainJavaCapable;
   }
 
-  public boolean isPlainOldJava() {
-    return plainOldJavaCapable && usePlainOldJava;
+  public boolean isPlainJava() {
+    return plainJavaCapable && usePlainJava;
+  }
+
+  /**
+   * Debug-time option to persist the code for the generated class to permit 
debugging.
+   * Has effect only when code is generated using the plain Java option. Code
+   * is written to the code directory specified in {@link ClassBuilder}.
+   * To debug code, set this option, then point your IDE to the code directory
+   * when the IDE prompts you for the source code location.
+   *
+   * @param persist true to write the code to disk, false (the default) to keep
+   * code only in memory.
+   */
+  public void saveCodeForDebugging(boolean persist) {
+    if (supportsPlainJava()) {
+      saveDebugCode = persist;
+      usePlainJava = true;
+    }
+  }
+
+  public boolean isCodeToBeSaved() {
+     return saveDebugCode;
   }
 
   public ClassGenerator<T> getRoot() {
@@ -145,13 +175,13 @@ public class CodeGenerator<T> {
 
   public void generate() {
 
-    // If this generated class uses the "straight Java" technique
+    // If this generated class uses the "plain Java" technique
     // (no byte code manipulation), then the class must extend the
     // template so it plays by normal Java rules for finding the
     // template methods via inheritance rather than via code injection.
 
-    if (isPlainOldJava()) {
-      rootGenerator.clazz._extends(definition.getTemplateClass( ));
+    if (isPlainJava()) {
+      rootGenerator.preparePlainJava( );
     }
 
     rootGenerator.flushCode();
@@ -165,8 +195,8 @@ public class CodeGenerator<T> {
       throw new IllegalStateException(e);
     }
 
-    this.generatedCode = w.getCode().toString();
-    this.generifiedCode = generatedCode.replaceAll(this.className, 
"GenericGenerated");
+    generatedCode = w.getCode().toString();
+    generifiedCode = generatedCode.replaceAll(className, "GenericGenerated");
   }
 
   public String generateAndGet() throws IOException {
@@ -186,6 +216,8 @@ public class CodeGenerator<T> {
     return fqcn;
   }
 
+  public String getClassName() { return className; }
+
   public static <T> CodeGenerator<T> get(TemplateClassDefinition<T> definition,
       FunctionImplementationRegistry funcRegistry) {
     return get(definition, funcRegistry, null);
@@ -249,5 +281,4 @@ public class CodeGenerator<T> {
     }
     return true;
   }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/OrderedPartitionSender.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/OrderedPartitionSender.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/OrderedPartitionSender.java
index 2c9aeaf..794c574 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/OrderedPartitionSender.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/config/OrderedPartitionSender.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java
index 149da25..ff159cd 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -56,12 +56,16 @@ public abstract class PriorityQueueTemplate implements 
PriorityQueue {
     this.limit = limit;
     this.context = context;
     this.allocator = allocator;
+    @SuppressWarnings("resource")
     final DrillBuf drillBuf = allocator.buffer(4 * (limit + 1));
     heapSv4 = new SelectionVector4(drillBuf, limit, Character.MAX_VALUE);
     this.hasSv2 = hasSv2;
   }
 
   @Override
+  public boolean validate() { return true; }
+
+  @Override
   public void resetQueue(VectorContainer container, SelectionVector4 v4) 
throws SchemaChangeException {
     assert container.getSchema().getSelectionVectorMode() == 
BatchSchema.SelectionVectorMode.FOUR_BYTE;
     BatchSchema schema = container.getSchema();
@@ -75,6 +79,7 @@ public abstract class PriorityQueueTemplate implements 
PriorityQueue {
     cleanup();
     hyperBatch = new ExpandableHyperContainer(newContainer);
     batchCount = hyperBatch.iterator().next().getValueVectors().length;
+    @SuppressWarnings("resource")
     final DrillBuf drillBuf = allocator.buffer(4 * (limit + 1));
     heapSv4 = new SelectionVector4(drillBuf, limit, Character.MAX_VALUE);
     // Reset queue size (most likely to be set to limit).
@@ -87,6 +92,7 @@ public abstract class PriorityQueueTemplate implements 
PriorityQueue {
     doSetup(context, hyperBatch, null);
   }
 
+  @SuppressWarnings("resource")
   @Override
   public void add(FragmentContext context, RecordBatchData batch) throws 
SchemaChangeException{
     Stopwatch watch = Stopwatch.createStarted();
@@ -125,6 +131,7 @@ public abstract class PriorityQueueTemplate implements 
PriorityQueue {
   @Override
   public void generate() throws SchemaChangeException {
     Stopwatch watch = Stopwatch.createStarted();
+    @SuppressWarnings("resource")
     final DrillBuf drillBuf = allocator.buffer(4 * queueSize);
     finalSv4 = new SelectionVector4(drillBuf, queueSize, 4000);
     for (int i = queueSize - 1; i >= 0; i--) {
@@ -161,7 +168,7 @@ public abstract class PriorityQueueTemplate implements 
PriorityQueue {
     }
   }
 
-  private void siftUp() {
+  private void siftUp() throws SchemaChangeException {
     int p = queueSize - 1;
     while (p > 0) {
       if (compare(p, (p - 1) / 2) > 0) {
@@ -173,7 +180,7 @@ public abstract class PriorityQueueTemplate implements 
PriorityQueue {
     }
   }
 
-  private void siftDown() {
+  private void siftDown() throws SchemaChangeException {
     int p = 0;
     int next;
     while (p * 2 + 1 < queueSize) {
@@ -199,7 +206,11 @@ public abstract class PriorityQueueTemplate implements 
PriorityQueue {
     int value = heapSv4.get(0);
     swap(0, queueSize - 1);
     queueSize--;
-    siftDown();
+    try {
+      siftDown();
+    } catch (SchemaChangeException e) {
+      throw new UnsupportedOperationException(e);
+    }
     return value;
   }
 
@@ -209,13 +220,17 @@ public abstract class PriorityQueueTemplate implements 
PriorityQueue {
     heapSv4.set(sv1, tmp);
   }
 
-  public int compare(int leftIndex, int rightIndex) {
+  public int compare(int leftIndex, int rightIndex) throws 
SchemaChangeException {
     int sv1 = heapSv4.get(leftIndex);
     int sv2 = heapSv4.get(rightIndex);
     return doEval(sv1, sv2);
   }
 
-  public abstract void doSetup(@Named("context") FragmentContext context, 
@Named("incoming") VectorContainer incoming, @Named("outgoing") RecordBatch 
outgoing);
-  public abstract int doEval(@Named("leftIndex") int leftIndex, 
@Named("rightIndex") int rightIndex);
-
+  public abstract void doSetup(@Named("context") FragmentContext context,
+                               @Named("incoming") VectorContainer incoming,
+                               @Named("outgoing") RecordBatch outgoing)
+                       throws SchemaChangeException;
+  public abstract int doEval(@Named("leftIndex") int leftIndex,
+                             @Named("rightIndex") int rightIndex)
+                      throws SchemaChangeException;
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
index 0fbcb7d..d2497f1 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -128,7 +128,8 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
     switch (outcome) {
       case OK:
       case OK_NEW_SCHEMA:
-        for (VectorWrapper w : incoming) {
+        for (VectorWrapper<?> w : incoming) {
+          @SuppressWarnings("resource")
           ValueVector v = c.addOrGet(w.getField());
           if (v instanceof AbstractContainerVector) {
             w.getValueVector().makeTransferPair(v);
@@ -136,7 +137,8 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
           }
         }
         c = VectorContainer.canonicalize(c);
-        for (VectorWrapper w : c) {
+        for (VectorWrapper<?> w : c) {
+          @SuppressWarnings("resource")
           ValueVector v = container.addOrGet(w.getField());
           if (v instanceof AbstractContainerVector) {
             w.getValueVector().makeTransferPair(v);
@@ -219,7 +221,7 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
           // fall through.
         case OK:
           if (incoming.getRecordCount() == 0) {
-            for (VectorWrapper w : incoming) {
+            for (VectorWrapper<?> w : incoming) {
               w.clear();
             }
             break;
@@ -267,7 +269,7 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
 
       this.sv4 = priorityQueue.getFinalSv4();
       container.clear();
-      for (VectorWrapper w : priorityQueue.getHyperBatch()) {
+      for (VectorWrapper<?> w : priorityQueue.getHyperBatch()) {
         container.add(w.getValueVectors());
       }
       container.buildSchema(BatchSchema.SelectionVectorMode.FOUR_BYTE);
@@ -286,6 +288,7 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
     Stopwatch watch = Stopwatch.createStarted();
     VectorContainer c = priorityQueue.getHyperBatch();
     VectorContainer newContainer = new VectorContainer(oContext);
+    @SuppressWarnings("resource")
     SelectionVector4 selectionVector4 = priorityQueue.getHeapSv4();
     SimpleRecordBatch batch = new SimpleRecordBatch(c, selectionVector4, 
context);
     SimpleRecordBatch newBatch = new SimpleRecordBatch(newContainer, null, 
context);
@@ -294,11 +297,13 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
     } else {
       for (VectorWrapper<?> i : batch) {
 
+        @SuppressWarnings("resource")
         ValueVector v = TypeHelper.getNewVector(i.getField(), 
oContext.getAllocator());
         newContainer.add(v);
       }
       copier.setupRemover(context, batch, newBatch);
     }
+    @SuppressWarnings("resource")
     SortRecordBatchBuilder builder = new 
SortRecordBatchBuilder(oContext.getAllocator());
     try {
       do {
@@ -331,6 +336,9 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
                                                      VectorAccessible batch, 
MappingSet mainMapping, MappingSet leftMapping, MappingSet rightMapping)
           throws ClassTransformationException, IOException, 
SchemaChangeException{
     CodeGenerator<PriorityQueue> cg = 
CodeGenerator.get(PriorityQueue.TEMPLATE_DEFINITION, 
context.getFunctionRegistry(), context.getOptions());
+    cg.plainJavaCapable(true);
+    // Uncomment this line to debug the generated code.
+//    cg.saveCodeForDebugging(true);
     ClassGenerator<PriorityQueue> g = cg.getRoot();
     g.setMappingSet(mainMapping);
 
@@ -381,10 +389,12 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
     final Stopwatch watch = Stopwatch.createStarted();
     final VectorContainer c = priorityQueue.getHyperBatch();
     final VectorContainer newContainer = new VectorContainer(oContext);
+    @SuppressWarnings("resource")
     final SelectionVector4 selectionVector4 = priorityQueue.getHeapSv4();
     final SimpleRecordBatch batch = new SimpleRecordBatch(c, selectionVector4, 
context);
     final SimpleRecordBatch newBatch = new SimpleRecordBatch(newContainer, 
null, context);
     copier = RemovingRecordBatch.getGenerated4Copier(batch, context, 
oContext.getAllocator(),  newContainer, newBatch, null);
+    @SuppressWarnings("resource")
     SortRecordBatchBuilder builder = new 
SortRecordBatchBuilder(oContext.getAllocator());
     try {
       do {

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
index d2b42d0..623c58b 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -20,7 +20,6 @@ package org.apache.drill.exec.physical.impl.aggregate;
 import java.io.IOException;
 import java.util.List;
 
-import com.google.common.collect.Lists;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.expression.ErrorCollector;
@@ -34,7 +33,6 @@ import org.apache.drill.exec.compile.sig.MappingSet;
 import org.apache.drill.exec.exception.ClassTransformationException;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.ClassGenerator;
-import org.apache.drill.exec.expr.ClassGenerator.HoldingContainer;
 import org.apache.drill.exec.expr.CodeGenerator;
 import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
 import org.apache.drill.exec.expr.TypeHelper;
@@ -56,6 +54,7 @@ import 
org.apache.drill.exec.record.selection.SelectionVector4;
 import org.apache.drill.exec.vector.AllocationHelper;
 import org.apache.drill.exec.vector.ValueVector;
 
+import com.google.common.collect.Lists;
 import com.sun.codemodel.JExpr;
 import com.sun.codemodel.JVar;
 
@@ -122,7 +121,7 @@ public class HashAggBatch extends 
AbstractRecordBatch<HashAggregate> {
     if (!createAggregator()) {
       state = BatchState.DONE;
     }
-    for (VectorWrapper w : container) {
+    for (VectorWrapper<?> w : container) {
       AllocationHelper.allocatePrecomputedChildCount(w.getValueVector(), 0, 0, 
0);
     }
   }
@@ -190,6 +189,9 @@ public class HashAggBatch extends 
AbstractRecordBatch<HashAggregate> {
         CodeGenerator.get(HashAggregator.TEMPLATE_DEFINITION, 
context.getFunctionRegistry(), context.getOptions());
     ClassGenerator<HashAggregator> cg = top.getRoot();
     ClassGenerator<HashAggregator> cgInner = 
cg.getInnerGenerator("BatchHolder");
+    top.plainJavaCapable(true);
+    // Uncomment this line to debug the generated code.
+//    top.saveCodeForDebugging(true);
 
     container.clear();
 
@@ -212,6 +214,7 @@ public class HashAggBatch extends 
AbstractRecordBatch<HashAggregate> {
       }
 
       final MaterializedField outputField = 
MaterializedField.create(ne.getRef().getAsNamePart().getName(), 
expr.getMajorType());
+      @SuppressWarnings("resource")
       ValueVector vv = TypeHelper.getNewVector(outputField, 
oContext.getAllocator());
 
       // add this group-by vector to the output container
@@ -236,6 +239,7 @@ public class HashAggBatch extends 
AbstractRecordBatch<HashAggregate> {
       }
 
       final MaterializedField outputField = 
MaterializedField.create(ne.getRef().getAsNamePart().getName(), 
expr.getMajorType());
+      @SuppressWarnings("resource")
       ValueVector vv = TypeHelper.getNewVector(outputField, 
oContext.getAllocator());
       aggrOutFieldIds[i] = container.add(vv);
 
@@ -268,7 +272,7 @@ public class HashAggBatch extends 
AbstractRecordBatch<HashAggregate> {
     cg.setMappingSet(UpdateAggrValuesMapping);
 
     for (LogicalExpression aggr : aggrExprs) {
-      HoldingContainer hc = cg.addExpr(aggr, 
ClassGenerator.BlkCreateMode.TRUE);
+      cg.addExpr(aggr, ClassGenerator.BlkCreateMode.TRUE);
     }
   }
 
@@ -290,9 +294,7 @@ public class HashAggBatch extends 
AbstractRecordBatch<HashAggregate> {
       
cg.getBlock("getVectorIndex")._return(var.invoke("getIndex").arg(JExpr.direct("recordIndex")));
       return;
     }
-
     }
-
   }
 
   @Override
@@ -307,5 +309,4 @@ public class HashAggBatch extends 
AbstractRecordBatch<HashAggregate> {
   protected void killIncoming(boolean sendUpstream) {
     incoming.kill(sendUpstream);
   }
-
 }
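
The enabling boilerplate is the same in every operator that builds its
CodeGenerator directly, as TopNBatch and HashAggBatch do above. Condensed, and
using the HashAggBatch names, the recipe looks like this (a sketch of the calls
already shown in the diff, not a drop-in replacement):

    CodeGenerator<HashAggregator> top = CodeGenerator.get(
        HashAggregator.TEMPLATE_DEFINITION,
        context.getFunctionRegistry(), context.getOptions());
    top.plainJavaCapable(true);          // this generated class is safe to compile as plain Java
    // top.saveCodeForDebugging(true);   // uncomment to write the source out and step through it
    ClassGenerator<HashAggregator> cg = top.getRoot();
    ClassGenerator<HashAggregator> cgInner = cg.getInnerGenerator("BatchHolder");  // nested holder class

In each converted operator the flags are set before any expressions are added to
the generators.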

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
index c31264a..1615200 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -29,7 +29,6 @@ import org.apache.drill.common.expression.ErrorCollectorImpl;
 import org.apache.drill.common.expression.ExpressionPosition;
 import org.apache.drill.common.expression.FieldReference;
 import org.apache.drill.common.expression.LogicalExpression;
-import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.exec.compile.sig.RuntimeOverridden;
 import org.apache.drill.exec.exception.ClassTransformationException;
 import org.apache.drill.exec.exception.SchemaChangeException;
@@ -44,7 +43,6 @@ import org.apache.drill.exec.physical.impl.common.HashTable;
 import org.apache.drill.exec.physical.impl.common.HashTableConfig;
 import org.apache.drill.exec.physical.impl.common.HashTableStats;
 import org.apache.drill.exec.physical.impl.common.IndexPointer;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.record.RecordBatch.IterOutcome;
@@ -60,30 +58,30 @@ import org.apache.drill.exec.vector.VariableWidthVector;
 public abstract class HashAggTemplate implements HashAggregator {
   private static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(HashAggregator.class);
 
-  private static final long ALLOCATOR_INITIAL_RESERVATION = 1 * 1024 * 1024;
-  private static final long ALLOCATOR_MAX_RESERVATION = 20L * 1000 * 1000 * 
1000;
+//  private static final long ALLOCATOR_INITIAL_RESERVATION = 1 * 1024 * 1024;
+//  private static final long ALLOCATOR_MAX_RESERVATION = 20L * 1000 * 1000 * 
1000;
   private static final int VARIABLE_WIDTH_VALUE_SIZE = 50;
 
   private static final boolean EXTRA_DEBUG_1 = false;
   private static final boolean EXTRA_DEBUG_2 = false;
-  private static final String TOO_BIG_ERROR =
-      "Couldn't add value to an empty batch.  This likely means that a single 
value is too long for a varlen field.";
-  private boolean newSchema = false;
+//  private static final String TOO_BIG_ERROR =
+//      "Couldn't add value to an empty batch.  This likely means that a 
single value is too long for a varlen field.";
+//  private boolean newSchema = false;
   private int underlyingIndex = 0;
   private int currentIndex = 0;
   private IterOutcome outcome;
-  private int outputCount = 0;
+//  private int outputCount = 0;
   private int numGroupedRecords = 0;
   private int outBatchIndex = 0;
   private int lastBatchOutputCount = 0;
   private RecordBatch incoming;
-  private BatchSchema schema;
+//  private BatchSchema schema;
   private HashAggBatch outgoing;
   private VectorContainer outContainer;
-  private FragmentContext context;
+//  private FragmentContext context;
   private BufferAllocator allocator;
 
-  private HashAggregate hashAggrConfig;
+//  private HashAggregate hashAggrConfig;
   private HashTable htable;
   private ArrayList<BatchHolder> batchHolders;
   private IndexPointer htIdxHolder; // holder for the Hashtable's internal 
index returned by put()
@@ -125,7 +123,8 @@ public abstract class HashAggTemplate implements 
HashAggregator {
     private int capacity = Integer.MAX_VALUE;
     private boolean allocatedNextBatch = false;
 
-    private BatchHolder() {
+    @SuppressWarnings("resource")
+    public BatchHolder() {
 
       aggrValuesContainer = new VectorContainer();
       boolean success = false;
@@ -231,15 +230,15 @@ public abstract class HashAggTemplate implements 
HashAggregator {
       throw new IllegalArgumentException("Wrong number of workspace 
variables.");
     }
 
-    this.context = context;
+//    this.context = context;
     this.stats = stats;
     this.allocator = allocator;
     this.incoming = incoming;
-    this.schema = incoming.getSchema();
+//    this.schema = incoming.getSchema();
     this.outgoing = outgoing;
     this.outContainer = outContainer;
 
-    this.hashAggrConfig = hashAggrConfig;
+//    this.hashAggrConfig = hashAggrConfig;
 
     // currently, hash aggregation is only applicable if there are group-by 
expressions.
     // For non-grouped (a.k.a Plain) aggregations that don't involve DISTINCT, 
there is no
@@ -324,7 +323,7 @@ public abstract class HashAggTemplate implements 
HashAggregator {
                 if (EXTRA_DEBUG_1) {
                   logger.debug("Received new schema.  Batch has {} records.", 
incoming.getRecordCount());
                 }
-                newSchema = true;
+//                newSchema = true;
                 this.cleanup();
                 // TODO: new schema case needs to be handled appropriately
                 return AggOutcome.UPDATE_AGGREGATOR;
@@ -381,8 +380,9 @@ public abstract class HashAggTemplate implements 
HashAggregator {
       outgoingIter.next();
     }
     while (outgoingIter.hasNext()) {
+      @SuppressWarnings("resource")
       ValueVector vv = outgoingIter.next().getValueVector();
-      MajorType type = vv.getField().getType();
+//      MajorType type = vv.getField().getType();
 
       /*
        * In build schema we use the allocation model that specifies exact 
record count
@@ -424,13 +424,13 @@ public abstract class HashAggTemplate implements 
HashAggregator {
     }
   }
 
-  private final AggOutcome setOkAndReturn() {
-    this.outcome = IterOutcome.OK;
-    for (VectorWrapper<?> v : outgoing) {
-      v.getValueVector().getMutator().setValueCount(outputCount);
-    }
-    return AggOutcome.RETURN_OUTCOME;
-  }
+//  private final AggOutcome setOkAndReturn() {
+//    this.outcome = IterOutcome.OK;
+//    for (VectorWrapper<?> v : outgoing) {
+//      v.getValueVector().getMutator().setValueCount(outputCount);
+//    }
+//    return AggOutcome.RETURN_OUTCOME;
+//  }
 
   private final void incIndex() {
     underlyingIndex++;
@@ -447,7 +447,7 @@ public abstract class HashAggTemplate implements 
HashAggregator {
   }
 
   private void addBatchHolder() {
-    BatchHolder bh = new BatchHolder();
+    BatchHolder bh = newBatchHolder();
     batchHolders.add(bh);
 
     if (EXTRA_DEBUG_1) {
@@ -457,6 +457,13 @@ public abstract class HashAggTemplate implements 
HashAggregator {
     bh.setup();
   }
 
+  // Overridden in the generated class when created as plain Java code.
+
+  protected BatchHolder newBatchHolder() {
+    return new BatchHolder();
+  }
+
+  @Override
   public IterOutcome outputCurrentBatch() {
     if (outBatchIndex >= batchHolders.size()) {
       this.outcome = IterOutcome.NONE;
@@ -486,7 +493,7 @@ public abstract class HashAggTemplate implements 
HashAggregator {
       v.getValueVector().getMutator().setValueCount(numOutputRecords);
     }
 
-    outputCount += numOutputRecords;
+//    outputCount += numOutputRecords;
 
     this.outcome = IterOutcome.OK;
 
@@ -506,10 +513,12 @@ public abstract class HashAggTemplate implements 
HashAggregator {
     return this.outcome;
   }
 
+  @Override
   public boolean allFlushed() {
     return allFlushed;
   }
 
+  @Override
   public boolean buildComplete() {
     return buildComplete;
   }
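
The newBatchHolder() indirection above is what lets a template with a nested
class work as plain Java. Under the byte-code merge the holder substitution
happens at the class-file level; as plain Java it has to happen in source, so
the template routes construction through a method the generated subclass
overrides, and the holder constructor becomes public so that subclass can reach
it. Roughly, the generated plain-Java class would take the following shape. The
names, package and bodies here are hypothetical, not taken from this commit:

    package org.apache.drill.exec.physical.impl.aggregate;  // assumed location of the sketch

    // Sketch only: declared abstract so the omitted generated bodies need not
    // be shown; the real generated class is concrete and also implements the
    // template's abstract methods.
    public abstract class HashAggregatorGen0 extends HashAggTemplate {

      public class BatchHolderGen0 extends BatchHolder {
        // Generated overrides of the @RuntimeOverridden holder methods go here.
      }

      @Override
      protected BatchHolder newBatchHolder() {
        // Substitute the generated holder in source, with no byte-code
        // constructor patching required.
        return new BatchHolderGen0();
      }
    }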

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
index ba830c4..420851a 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -205,6 +205,7 @@ public class StreamingAggBatch extends 
AbstractRecordBatch<StreamingAggregate> {
    * as we want the output to be NULL. For the required vectors (only for 
count()) we set the value to be zero since
    * we don't zero out our buffers initially while allocating them.
    */
+  @SuppressWarnings("resource")
   private void constructSpecialBatch() {
     int exprIndex = 0;
     for (final VectorWrapper<?> vw: container) {
@@ -259,6 +260,9 @@ public class StreamingAggBatch extends 
AbstractRecordBatch<StreamingAggregate> {
   private StreamingAggregator createAggregatorInternal() throws 
SchemaChangeException, ClassTransformationException, IOException{
     ClassGenerator<StreamingAggregator> cg = 
CodeGenerator.getRoot(StreamingAggTemplate.TEMPLATE_DEFINITION,
         context.getFunctionRegistry(), context.getOptions());
+    cg.getCodeGenerator().plainJavaCapable(true);
+    // Uncomment this line to debug the generated code.
+//    cg.getCodeGenerator().saveCodeForDebugging(true);
     container.clear();
 
     LogicalExpression[] keyExprs = new 
LogicalExpression[popConfig.getKeys().size()];
@@ -275,6 +279,7 @@ public class StreamingAggBatch extends 
AbstractRecordBatch<StreamingAggregate> {
       }
       keyExprs[i] = expr;
       final MaterializedField outputField = 
MaterializedField.create(ne.getRef().getAsUnescapedPath(), expr.getMajorType());
+      @SuppressWarnings("resource")
       final ValueVector vector = TypeHelper.getNewVector(outputField, 
oContext.getAllocator());
       keyOutputIds[i] = container.add(vector);
     }
@@ -290,6 +295,7 @@ public class StreamingAggBatch extends 
AbstractRecordBatch<StreamingAggregate> {
       }
 
       final MaterializedField outputField = 
MaterializedField.create(ne.getRef().getAsUnescapedPath(), expr.getMajorType());
+      @SuppressWarnings("resource")
       ValueVector vector = TypeHelper.getNewVector(outputField, 
oContext.getAllocator());
       TypedFieldId id = container.add(vector);
       valueExprs[i] = new ValueVectorWriteExpression(id, expr, true);
@@ -366,7 +372,7 @@ public class StreamingAggBatch extends 
AbstractRecordBatch<StreamingAggregate> {
   private void addRecordValues(ClassGenerator<StreamingAggregator> cg, 
LogicalExpression[] valueExprs) {
     cg.setMappingSet(EVAL);
     for (final LogicalExpression ex : valueExprs) {
-      final HoldingContainer hc = cg.addExpr(ex);
+      cg.addExpr(ex);
     }
   }
 
@@ -375,7 +381,7 @@ public class StreamingAggBatch extends 
AbstractRecordBatch<StreamingAggregate> {
   private void outputRecordKeys(ClassGenerator<StreamingAggregator> cg, 
TypedFieldId[] keyOutputIds, LogicalExpression[] keyExprs) {
     cg.setMappingSet(RECORD_KEYS);
     for (int i = 0; i < keyExprs.length; i++) {
-      final HoldingContainer hc = cg.addExpr(new 
ValueVectorWriteExpression(keyOutputIds[i], keyExprs[i], true));
+      cg.addExpr(new ValueVectorWriteExpression(keyOutputIds[i], keyExprs[i], 
true));
     }
   }
 
@@ -395,7 +401,7 @@ public class StreamingAggBatch extends 
AbstractRecordBatch<StreamingAggregate> {
       cg.setMappingSet(RECORD_KEYS_PREV);
       final HoldingContainer innerExpression = cg.addExpr(keyExprs[i], 
ClassGenerator.BlkCreateMode.FALSE);
       cg.setMappingSet(RECORD_KEYS_PREV_OUT);
-      final HoldingContainer outerExpression = cg.addExpr(new 
ValueVectorWriteExpression(keyOutputIds[i], new 
HoldingContainerExpression(innerExpression), true), 
ClassGenerator.BlkCreateMode.FALSE);
+      cg.addExpr(new ValueVectorWriteExpression(keyOutputIds[i], new 
HoldingContainerExpression(innerExpression), true), 
ClassGenerator.BlkCreateMode.FALSE);
     }
   }
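
Operators that start from CodeGenerator.getRoot() get a ClassGenerator back, so
they reach the new flags through getCodeGenerator(), as StreamingAggBatch does
above and FilterRecordBatch does below. The short form of that variant:

    ClassGenerator<StreamingAggregator> cg = CodeGenerator.getRoot(
        StreamingAggTemplate.TEMPLATE_DEFINITION,
        context.getFunctionRegistry(), context.getOptions());
    cg.getCodeGenerator().plainJavaCapable(true);
    // cg.getCodeGenerator().saveCodeForDebugging(true);  // uncomment to debug the generated code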
 

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java
index 82e8777..3417611 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -20,7 +20,6 @@ package org.apache.drill.exec.physical.impl.aggregate;
 import javax.inject.Named;
 
 import org.apache.drill.exec.exception.SchemaChangeException;
-import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.record.RecordBatch.IterOutcome;
@@ -54,7 +53,6 @@ public abstract class StreamingAggTemplate implements 
StreamingAggregator {
     setupInterior(incoming, outgoing);
   }
 
-
   private void allocateOutgoing() {
     for (VectorWrapper<?> w : outgoing) {
       w.getValueVector().allocateNew();
@@ -348,5 +346,4 @@ public abstract class StreamingAggTemplate implements 
StreamingAggregator {
   public abstract void outputRecordValues(@Named("outIndex") int outIndex);
   public abstract int getVectorIndex(@Named("recordIndex") int recordIndex);
   public abstract boolean resetValues();
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
index 972e8c7..77ebb0d 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/ChainedHashTable.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed
  * with this work for additional information regarding copyright
@@ -132,6 +132,11 @@ public class ChainedHashTable {
   public HashTable createAndSetupHashTable(TypedFieldId[] outKeyFieldIds) 
throws ClassTransformationException,
       IOException, SchemaChangeException {
     CodeGenerator<HashTable> top = 
CodeGenerator.get(HashTable.TEMPLATE_DEFINITION, context.getFunctionRegistry(), 
context.getOptions());
+    top.plainJavaCapable(true);
+    // Uncomment this line to debug the generated code.
+    // This code is called from generated code, so to step into this code,
+    // persist the code generated in HashAggBatch also.
+//  top.saveCodeForDebugging(true);
     ClassGenerator<HashTable> cg = top.getRoot();
     ClassGenerator<HashTable> cgInner = cg.getInnerGenerator("BatchHolder");
 
@@ -188,6 +193,7 @@ public class ChainedHashTable {
     for (NamedExpression ne : htConfig.getKeyExprsBuild()) {
       LogicalExpression expr = keyExprsBuild[i];
       final MaterializedField outputField = 
MaterializedField.create(ne.getRef().getAsUnescapedPath(), expr.getMajorType());
+      @SuppressWarnings("resource")
       ValueVector vv = TypeHelper.getNewVector(outputField, allocator);
       htKeyFieldIds[i] = htContainerOrig.add(vv);
       i++;

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
index efd695e..96f9422 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -22,8 +22,6 @@ import java.util.Iterator;
 
 import javax.inject.Named;
 
-import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.compile.sig.RuntimeOverridden;
@@ -35,7 +33,6 @@ import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.record.TransferPair;
 import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.record.VectorWrapper;
-import org.apache.drill.exec.vector.AllocationHelper;
 import org.apache.drill.exec.vector.BigIntVector;
 import org.apache.drill.exec.vector.FixedWidthVector;
 import org.apache.drill.exec.vector.IntVector;
@@ -73,7 +70,7 @@ public abstract class HashTableTemplate implements HashTable {
   // Placeholder for the current index while probing the hash table
   private IndexPointer currentIdxHolder;
 
-  private FragmentContext context;
+//  private FragmentContext context;
 
   private BufferAllocator allocator;
 
@@ -114,11 +111,11 @@ public abstract class HashTableTemplate implements 
HashTable {
     private IntVector hashValues;
 
     private int maxOccupiedIdx = -1;
-    private int batchOutputCount = 0;
+//    private int batchOutputCount = 0;
 
     private int batchIndex = 0;
 
-    private BatchHolder(int idx) {
+    public BatchHolder(int idx) {
 
       this.batchIndex = idx;
 
@@ -126,6 +123,7 @@ public abstract class HashTableTemplate implements 
HashTable {
       boolean success = false;
       try {
         for (VectorWrapper<?> w : htContainerOrig) {
+          @SuppressWarnings("resource")
           ValueVector vv = TypeHelper.getNewVector(w.getField(), allocator);
 
           // Capacity for "hashValues" and "links" vectors is BATCH_SIZE 
records. It is better to allocate space for
@@ -331,7 +329,9 @@ public abstract class HashTableTemplate implements 
HashTable {
       Iterator<VectorWrapper<?>> outgoingIter = outContainer.iterator();
 
       for (VectorWrapper<?> sourceWrapper : htContainer) {
+        @SuppressWarnings("resource")
         ValueVector sourceVV = sourceWrapper.getValueVector();
+        @SuppressWarnings("resource")
         ValueVector targetVV = outgoingIter.next().getValueVector();
         TransferPair tp = sourceVV.makeTransferPair(targetVV);
         tp.splitAndTransfer(outStartIndex, numRecords);
@@ -362,6 +362,7 @@ public abstract class HashTableTemplate implements 
HashTable {
 
     private void setValueCount() {
       for (VectorWrapper<?> vw : htContainer) {
+        @SuppressWarnings("resource")
         ValueVector vv = vw.getValueVector();
         vv.getMutator().setValueCount(maxOccupiedIdx + 1);
       }
@@ -452,7 +453,7 @@ public abstract class HashTableTemplate implements 
HashTable {
     }
 
     this.htConfig = htConfig;
-    this.context = context;
+//    this.context = context;
     this.allocator = allocator;
     this.incomingBuild = incomingBuild;
     this.incomingProbe = incomingProbe;
@@ -480,6 +481,7 @@ public abstract class HashTableTemplate implements 
HashTable {
     currentIdxHolder = new IndexPointer();
   }
 
+  @Override
   public void updateBatches() {
     doSetup(incomingBuild, incomingProbe);
     for (BatchHolder batchHolder : batchHolders) {
@@ -495,10 +497,12 @@ public abstract class HashTableTemplate implements 
HashTable {
     return numResizing;
   }
 
+  @Override
   public int size() {
     return numEntries;
   }
 
+  @Override
   public void getStats(HashTableStats stats) {
     assert stats != null;
     stats.numBuckets = numBuckets();
@@ -507,10 +511,12 @@ public abstract class HashTableTemplate implements 
HashTable {
     stats.resizingTime = resizingTime;
   }
 
+  @Override
   public boolean isEmpty() {
     return numEntries == 0;
   }
 
+  @Override
   public void clear() {
     if (batchHolders != null) {
       for (BatchHolder bh : batchHolders) {
@@ -538,6 +544,7 @@ public abstract class HashTableTemplate implements 
HashTable {
     return rounded;
   }
 
+  @Override
   public void put(int incomingRowIdx, IndexPointer htIdxHolder, int 
retryCount) {
     put(incomingRowIdx, htIdxHolder);
   }
@@ -680,12 +687,16 @@ public abstract class HashTableTemplate implements 
HashTable {
   }
 
   private BatchHolder addBatchHolder() {
-    BatchHolder bh = new BatchHolder(batchHolders.size());
+    BatchHolder bh = newBatchHolder(batchHolders.size());
     batchHolders.add(bh);
     bh.setup();
     return bh;
   }
 
+  protected BatchHolder newBatchHolder(int index) {
+    return new BatchHolder(index);
+  }
+
   // Resize the hash table if needed by creating a new one with double the 
number of buckets.
   // For each entry in the old hash table, re-hash it to the new table and 
update the metadata
   // in the new table.. the metadata consists of the startIndices, links and 
hashValues.
@@ -744,6 +755,7 @@ public abstract class HashTableTemplate implements 
HashTable {
     numResizing++;
   }
 
+  @Override
   public boolean outputKeys(int batchIdx, VectorContainer outContainer, int 
outStartIndex, int numRecords) {
     assert batchIdx < batchHolders.size();
     if (!batchHolders.get(batchIdx).outputKeys(outContainer, outStartIndex, 
numRecords)) {
@@ -762,6 +774,7 @@ public abstract class HashTableTemplate implements 
HashTable {
     return vector;
   }
 
+  @Override
   public void addNewKeyBatch() {
     int numberOfBatches = batchHolders.size();
     this.addBatchHolder();

http://git-wip-us.apache.org/repos/asf/drill/blob/ee399317/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java
index 4b16185..6dfd311 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/filter/FilterRecordBatch.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -77,7 +77,11 @@ public class FilterRecordBatch extends 
AbstractSingleRecordBatch<Filter>{
   protected IterOutcome doWork() {
     container.zeroVectors();
     int recordCount = incoming.getRecordCount();
-    filter.filterBatch(recordCount);
+    try {
+      filter.filterBatch(recordCount);
+    } catch (SchemaChangeException e) {
+      throw new UnsupportedOperationException(e);
+    }
 
     return IterOutcome.OK;
   }
@@ -191,7 +195,11 @@ public class FilterRecordBatch extends 
AbstractSingleRecordBatch<Filter>{
 
     try {
       final TransferPair[] tx = transfers.toArray(new 
TransferPair[transfers.size()]);
-      final Filterer filter = context.getImplementationClass(cg);
+      CodeGenerator<Filterer> codeGen = cg.getCodeGenerator();
+      codeGen.plainJavaCapable(true);
+      // Uncomment this line to debug the generated code.
+//    codeGen.saveCodeForDebugging(true);
+      final Filterer filter = context.getImplementationClass(codeGen);
       filter.setup(context, incoming, this, tx);
       return filter;
     } catch (ClassTransformationException | IOException e) {
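
Taken together, converting an operator comes down to the three small steps
visible in the FilterRecordBatch change above: flag the generator, pass the
flagged CodeGenerator to getImplementationClass(), and wrap the checked
SchemaChangeException where the operator's own contract cannot declare it. A
condensed sketch using the names from that file (fragments of the two methods
shown in the diff, not complete methods):

    // In the method that builds the Filterer:
    CodeGenerator<Filterer> codeGen = cg.getCodeGenerator();
    codeGen.plainJavaCapable(true);
    // codeGen.saveCodeForDebugging(true);      // uncomment to debug the generated filter
    final Filterer filter = context.getImplementationClass(codeGen);
    filter.setup(context, incoming, this, tx);

    // In doWork(), where the RecordBatch contract cannot add a checked exception:
    try {
      filter.filterBatch(recordCount);          // generated method, now throws SchemaChangeException
    } catch (SchemaChangeException e) {
      throw new UnsupportedOperationException(e);
    }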
