This is an automated email from the ASF dual-hosted git repository.

volodymyr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit d4c8f35f508e3e8e517a69975028bec3c7975b25
Author: Volodymyr Vysotskyi <[email protected]>
AuthorDate: Wed Dec 11 15:11:31 2019 +0200

    DRILL-7483: Add support for 12 and 13 java versions
    
    closes #1935
---
 .circleci/config.yml                               | 345 ++++++++++-----------
 .../apache/drill/exec/hive/HiveClusterTest.java    |  32 ++
 .../org/apache/drill/exec/hive/HiveTestBase.java   |  31 +-
 .../apache/drill/exec/hive/HiveTestUtilities.java  |  22 ++
 .../exec/hive/complex_types/TestHiveArrays.java    |   6 +-
 .../exec/hive/complex_types/TestHiveMaps.java      |   6 +-
 .../exec/hive/complex_types/TestHiveStructs.java   |   6 +-
 .../exec/hive/complex_types/TestHiveUnions.java    |   6 +-
 .../hive/BaseTestHiveImpersonation.java            |   7 +
 .../exec/sql/hive/TestViewSupportOnHiveTables.java |   6 +-
 contrib/storage-mongo/pom.xml                      |   2 +-
 .../apache/drill/exec/compile/MergeAdapter.java    |  94 +++---
 .../impl/metadata/MetadataAggregateHelper.java     |   3 +-
 .../physical/impl/validate/BatchValidator.java     |   4 +
 .../rest/spnego/TestDrillSpnegoAuthenticator.java  |  48 ++-
 .../exec/sql/TestInfoSchemaWithMetastore.java      |   5 +-
 .../drill/exec/sql/TestMetastoreCommands.java      |   3 +-
 .../drill/exec/memory/BoundsCheckingTest.java      |  37 +--
 metastore/iceberg-metastore/pom.xml                |  12 +-
 .../iceberg/schema/TestIcebergTableSchema.java     | 241 ++++++++------
 pom.xml                                            |  19 +-
 tools/fmpp/pom.xml                                 |  10 +-
 22 files changed, 523 insertions(+), 422 deletions(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 31cb2cc..71d424f 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -25,45 +25,46 @@ jobs:
     working_directory: ~/drill
 
     steps:
-    - checkout
-    - restore_cache:
-        keys:
-        - m2-{{ checksum "pom.xml" }}
-        - m2- # used if checksum fails
-    - run:
-        name: Update maven version
-        # TODO: Could be removed, once Machine Executor image is updated https://github.com/circleci/image-builder/issues/140
-        # and the possibility of specifying Maven version is added https://github.com/circleci/image-builder/issues/143
-        command:
-          curl -fsSL https://git.io/vpDIf | bash -s -- 3.6.0
-    - run:
-        name: Update packages list
-        command:
-          sudo apt-get update
-    - run:
-        name: Install libaio1.so library for MySQL integration tests
-        command:
-          sudo apt-get install libaio1 libaio-dev
-    - run:
-        name: Drill project build
-        # TODO: 2. Optimizing Maven Builds on CircleCI - https://circleci.com/blog/optimizing-maven-builds-on-circleci/
-        # TODO: 3. Resolving memory issues without "SlowTest" and "UnlikelyTest" excludedGroups in the build
-        command: >
-          mvn install -Drat.skip=false -Dlicense.skip=false -DmemoryMb=2560 -DdirectMemoryMb=4608
-          -DexcludedGroups="org.apache.drill.categories.SlowTest,org.apache.drill.categories.UnlikelyTest" --batch-mode
-    - run:
-        name: Save test results
-        command: |
-          mkdir -p ~/test-results/junit/
-          find . -type f -regex ".*/target/surefire-reports/.*xml" -exec cp {} ~/test-results/junit/ \;
-        when: always
-    - store_test_results:
-        path: ~/test-results
-    - save_cache:
-        paths:
-          - ~/.m2
-        key: m2-{{ checksum "pom.xml" }}
-  build_jdk9:
+      - checkout
+      - restore_cache:
+          keys:
+            - m2-{{ checksum "pom.xml" }}
+            - m2- # used if checksum fails
+      - run:
+          name: Update maven version
+          # TODO: Could be removed, once Machine Executor image is updated https://github.com/circleci/image-builder/issues/140
+          # and the possibility of specifying Maven version is added https://github.com/circleci/image-builder/issues/143
+          command:
+            curl -fsSL https://git.io/vpDIf | bash -s -- 3.6.3
+      - run:
+          name: Update packages list
+          command:
+            sudo apt-get update
+      - run:
+          name: Install libaio1.so library for MySQL integration tests
+          command:
+            sudo apt-get install libaio1 libaio-dev
+      - run:
+          name: Drill project build
+          # TODO: 2. Optimizing Maven Builds on CircleCI - https://circleci.com/blog/optimizing-maven-builds-on-circleci/
+          # TODO: 3. Resolving memory issues without "SlowTest" and "UnlikelyTest" excludedGroups in the build
+          command: >
+            mvn install -Drat.skip=false -Dlicense.skip=false -DmemoryMb=2560 -DdirectMemoryMb=4608
+            -DexcludedGroups="org.apache.drill.categories.SlowTest,org.apache.drill.categories.UnlikelyTest" --batch-mode
+      - run:
+          name: Save test results
+          command: |
+            mkdir -p ~/test-results/junit/
+            find . -type f -regex ".*/target/surefire-reports/.*xml" -exec cp {} ~/test-results/junit/ \;
+          when: always
+      - store_test_results:
+          path: ~/test-results
+      - save_cache:
+          paths:
+            - ~/.m2
+          key: m2-{{ checksum "pom.xml" }}
+
+  build_jdk11:
     machine:
       enabled: true
       image: circleci/classic:latest
@@ -72,50 +73,50 @@ jobs:
     working_directory: ~/drill
 
     steps:
-    - checkout
-    - restore_cache:
-        keys:
-        - m2-{{ checksum "pom.xml" }}
-        - m2- # used if checksum fails
-    - run:
-        name: Update packages list
-        command:
-          sudo apt-get update
-    - run:
-        name: Install java 9
-        command:
-          sudo apt-get -y install openjdk-9-jdk
-    - run:
-        name: Set default java 9
-        command:
-          sudo update-java-alternatives --set java-1.9.0-openjdk-amd64
-    - run:
-        name: Update maven version
-        command:
-          curl -fsSL https://git.io/vpDIf | bash -s -- 3.6.0
-    - run:
-        name: Install libaio1.so library for MySQL integration tests
-        command:
-          sudo apt-get install libaio1 libaio-dev
-    - run:
-        name: Drill project build
-        command: >
-          mvn install -Drat.skip=false -Dlicense.skip=false -DmemoryMb=2560 -DdirectMemoryMb=4608
-          -DexcludedGroups="org.apache.drill.categories.SlowTest,org.apache.drill.categories.UnlikelyTest" --batch-mode
-    - run:
-        name: Save test results
-        command: |
-          mkdir -p ~/test-results/junit/
-          find . -type f -regex ".*/target/surefire-reports/.*xml" -exec cp {} ~/test-results/junit/ \;
-        when: always
-    - store_test_results:
-        path: ~/test-results
-    - save_cache:
-        paths:
-          - ~/.m2
-        key: m2-{{ checksum "pom.xml" }}
+      - checkout
+      - restore_cache:
+          keys:
+            - m2-{{ checksum "pom.xml" }}
+            - m2- # used if checksum fails
+      - run:
+          name: Update packages list
+          command:
+            sudo apt-get update
+      - run:
+          name: Install java 11
+          command:
+            sudo apt-get -y install openjdk-11-jdk
+      - run:
+          name: Set default java 11
+          command:
+            sudo update-java-alternatives --set java-1.11.0-openjdk-amd64
+      - run:
+          name: Update maven version
+          command:
+            curl -fsSL https://git.io/vpDIf | bash -s -- 3.6.3
+      - run:
+          name: Install libaio1.so library for MySQL integration tests
+          command:
+            sudo apt-get install libaio1 libaio-dev
+      - run:
+          name: Drill project build
+          command: >
+            mvn install -Drat.skip=false -Dlicense.skip=false -DmemoryMb=2560 -DdirectMemoryMb=4608
+            -DexcludedGroups="org.apache.drill.categories.SlowTest,org.apache.drill.categories.UnlikelyTest" --batch-mode
+      - run:
+          name: Save test results
+          command: |
+            mkdir -p ~/test-results/junit/
+            find . -type f -regex ".*/target/surefire-reports/.*xml" -exec cp {} ~/test-results/junit/ \;
+          when: always
+      - store_test_results:
+          path: ~/test-results
+      - save_cache:
+          paths:
+            - ~/.m2
+          key: m2-{{ checksum "pom.xml" }}
 
-  build_jdk10:
+  build_jdk12:
     machine:
       enabled: true
       image: circleci/classic:latest
@@ -124,50 +125,50 @@ jobs:
     working_directory: ~/drill
 
     steps:
-    - checkout
-    - restore_cache:
-        keys:
-        - m2-{{ checksum "pom.xml" }}
-        - m2- # used if checksum fails
-    - run:
-        name: Update packages list
-        command:
-          sudo apt-get update
-    - run:
-        name: Install java 10
-        command:
-          sudo apt-get -y install openjdk-10-jdk
-    - run:
-        name: Set default java 10
-        command:
-          sudo update-java-alternatives --set java-1.10.0-openjdk-amd64
-    - run:
-        name: Update maven version
-        command:
-          curl -fsSL https://git.io/vpDIf | bash -s -- 3.6.0
-    - run:
-        name: Install libaio1.so library for MySQL integration tests
-        command:
-          sudo apt-get install libaio1 libaio-dev
-    - run:
-        name: Drill project build
-        command: >
-          mvn install -Drat.skip=false -Dlicense.skip=false -DmemoryMb=2560 -DdirectMemoryMb=4608
-          -DexcludedGroups="org.apache.drill.categories.SlowTest,org.apache.drill.categories.UnlikelyTest" --batch-mode
-    - run:
-        name: Save test results
-        command: |
-          mkdir -p ~/test-results/junit/
-          find . -type f -regex ".*/target/surefire-reports/.*xml" -exec cp {} ~/test-results/junit/ \;
-        when: always
-    - store_test_results:
-        path: ~/test-results
-    - save_cache:
-        paths:
-          - ~/.m2
-        key: m2-{{ checksum "pom.xml" }}
+      - checkout
+      - restore_cache:
+          keys:
+            - m2-{{ checksum "pom.xml" }}
+            - m2- # used if checksum fails
+      - run:
+          name: Update packages list
+          command:
+            sudo apt-get update
+      - run:
+          name: Install java 12
+          command:
+            sudo apt-get -y install openjdk-12-jdk
+      - run:
+          name: Set default java 12
+          command:
+            sudo update-java-alternatives --set java-1.12.0-openjdk-amd64
+      - run:
+          name: Update maven version
+          command:
+            curl -fsSL https://git.io/vpDIf | bash -s -- 3.6.3
+      - run:
+          name: Install libaio1.so library for MySQL integration tests
+          command:
+            sudo apt-get install libaio1 libaio-dev
+      - run:
+          name: Drill project build
+          command: >
+            mvn install -Drat.skip=false -Dlicense.skip=false -DmemoryMb=2560 -DdirectMemoryMb=4608
+            -DexcludedGroups="org.apache.drill.categories.SlowTest,org.apache.drill.categories.UnlikelyTest" --batch-mode
+      - run:
+          name: Save test results
+          command: |
+            mkdir -p ~/test-results/junit/
+            find . -type f -regex ".*/target/surefire-reports/.*xml" -exec cp {} ~/test-results/junit/ \;
+          when: always
+      - store_test_results:
+          path: ~/test-results
+      - save_cache:
+          paths:
+            - ~/.m2
+          key: m2-{{ checksum "pom.xml" }}
 
-  build_jdk11:
+  build_jdk13:
     machine:
       enabled: true
       image: circleci/classic:latest
@@ -176,56 +177,54 @@ jobs:
     working_directory: ~/drill
 
     steps:
-    - checkout
-    - restore_cache:
-        keys:
-        - m2-{{ checksum "pom.xml" }}
-        - m2- # used if checksum fails
-    - run:
-        name: Update packages list
-        command:
-          sudo apt-get update
-    - run:
-        name: Install java 11
-        command:
-          sudo apt-get -y install openjdk-11-jdk
-    - run:
-        name: Set default java 11
-        command:
-          sudo update-java-alternatives --set java-1.11.0-openjdk-amd64
-    - run:
-        name: Update maven version
-        command:
-          curl -fsSL https://git.io/vpDIf | bash -s -- 3.6.0
-    - run:
-        name: Install libaio1.so library for MySQL integration tests
-        command:
-          sudo apt-get install libaio1 libaio-dev
-    - run:
-        name: Drill project build
-        # Set forkCount to 1 since tests use more memory and memory limitations for CircleCI is reached
-        # for default value of forkCount.
-        command: >
-          mvn install -Drat.skip=false -Dlicense.skip=false -DmemoryMb=2560 -DdirectMemoryMb=4608 -DforkCount=1
-          -DexcludedGroups="org.apache.drill.categories.SlowTest,org.apache.drill.categories.UnlikelyTest" --batch-mode
-    - run:
-        name: Save test results
-        command: |
-          mkdir -p ~/test-results/junit/
-          find . -type f -regex ".*/target/surefire-reports/.*xml" -exec cp {} ~/test-results/junit/ \;
-        when: always
-    - store_test_results:
-        path: ~/test-results
-    - save_cache:
-        paths:
-          - ~/.m2
-        key: m2-{{ checksum "pom.xml" }}
+      - checkout
+      - restore_cache:
+          keys:
+            - m2-{{ checksum "pom.xml" }}
+            - m2- # used if checksum fails
+      - run:
+          name: Update packages list
+          command:
+            sudo apt-get update
+      - run:
+          name: Install java 13
+          command:
+            sudo apt-get -y install openjdk-13-jdk
+      - run:
+          name: Set default java 13
+          command:
+            sudo update-java-alternatives --set java-1.13.0-openjdk-amd64
+      - run:
+          name: Update maven version
+          command:
+            curl -fsSL https://git.io/vpDIf | bash -s -- 3.6.3
+      - run:
+          name: Install libaio1.so library for MySQL integration tests
+          command:
+            sudo apt-get install libaio1 libaio-dev
+      - run:
+          name: Drill project build
+          command: >
+            mvn install -Drat.skip=false -Dlicense.skip=false -DmemoryMb=2560 -DdirectMemoryMb=4608
+            -DexcludedGroups="org.apache.drill.categories.SlowTest,org.apache.drill.categories.UnlikelyTest" --batch-mode
+      - run:
+          name: Save test results
+          command: |
+            mkdir -p ~/test-results/junit/
+            find . -type f -regex ".*/target/surefire-reports/.*xml" -exec cp 
{} ~/test-results/junit/ \;
+          when: always
+      - store_test_results:
+          path: ~/test-results
+      - save_cache:
+          paths:
+            - ~/.m2
+          key: m2-{{ checksum "pom.xml" }}
 
 workflows:
   version: 2
   build_and_test:
     jobs:
-    - build_jdk8
-    - build_jdk9
-    - build_jdk10
-    - build_jdk11
+      - build_jdk8
+      - build_jdk11
+      - build_jdk12
+      - build_jdk13
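
Note: the -DexcludedGroups flag in the build commands above maps to Surefire's JUnit 4 category
filtering. A minimal sketch of how a test opts into such a category (the category class comes from
the Drill build above; the test class itself is hypothetical):

    import org.apache.drill.categories.SlowTest;
    import org.junit.Test;
    import org.junit.experimental.categories.Category;

    // Tests tagged with an excluded category are filtered out by Surefire,
    // which keeps the CI run inside the CircleCI memory and time limits.
    @Category(SlowTest.class)
    public class ExampleSlowTest {
      @Test
      public void longRunningScenario() {
        // not executed when SlowTest appears in excludedGroups
      }
    }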
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveClusterTest.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveClusterTest.java
new file mode 100644
index 0000000..3fd3a11
--- /dev/null
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveClusterTest.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.hive;
+
+import org.apache.drill.test.ClusterTest;
+import org.junit.BeforeClass;
+
+/**
+ * Base class for Hive cluster tests.
+ */
+public class HiveClusterTest extends ClusterTest {
+
+  @BeforeClass
+  public static void checkJavaVersion() {
+    HiveTestUtilities.assumeJavaVersion();
+  }
+}
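
A hypothetical subclass, for illustration only: extending HiveClusterTest is all that is needed to
inherit the @BeforeClass guard, so the whole derived class is reported as skipped (not failed) on
JDK 9+.

    import org.junit.Test;

    public class ExampleHiveQueryTest extends HiveClusterTest {
      @Test
      public void selectFromHiveTable() {
        // runs only when the inherited Java-version assumption holds
      }
    }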
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestBase.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestBase.java
index c3acdb0..05ce7ae 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestBase.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestBase.java
@@ -37,30 +37,35 @@ public class HiveTestBase extends PlanTestBase {
   public static final HiveTestFixture HIVE_TEST_FIXTURE;
 
   static {
-    // generate hive data common for all test classes using own dirWatcher
-    BaseDirTestWatcher generalDirWatcher = new BaseDirTestWatcher() {
-      {
+    if (HiveTestUtilities.supportedJavaVersion()) {
+      // generate hive data common for all test classes using own dirWatcher
+      BaseDirTestWatcher generalDirWatcher = new BaseDirTestWatcher() {
+        {
        /*
           Below protected method invoked to create directory DirWatcher.dir with path like:
           ./target/org.apache.drill.exec.hive.HiveTestBase123e4567-e89b-12d3-a456-556642440000.
           Then subdirectory with name 'root' will be used to hold metastore_db and other data shared between
           all derivatives of the class. Note that UUID suffix is necessary to avoid conflicts between forked JVMs.
        */
-        starting(Description.createSuiteDescription(HiveTestBase.class.getName().concat(UUID.randomUUID().toString())));
-      }
-    };
-    File baseDir = generalDirWatcher.getRootDir();
-    HIVE_TEST_FIXTURE = HiveTestFixture.builder(baseDir).build();
-    HiveTestDataGenerator dataGenerator = new HiveTestDataGenerator(generalDirWatcher, baseDir,
-        HIVE_TEST_FIXTURE.getWarehouseDir());
-    HIVE_TEST_FIXTURE.getDriverManager().runWithinSession(dataGenerator::generateData);
+          starting(Description.createSuiteDescription(HiveTestBase.class.getName().concat(UUID.randomUUID().toString())));
+        }
+      };
+      File baseDir = generalDirWatcher.getRootDir();
+      HIVE_TEST_FIXTURE = HiveTestFixture.builder(baseDir).build();
+      HiveTestDataGenerator dataGenerator = new HiveTestDataGenerator(generalDirWatcher, baseDir,
+          HIVE_TEST_FIXTURE.getWarehouseDir());
+      HIVE_TEST_FIXTURE.getDriverManager().runWithinSession(dataGenerator::generateData);
 
-    // set hook for clearing watcher's dir on JVM shutdown
-    Runtime.getRuntime().addShutdownHook(new Thread(() -> FileUtils.deleteQuietly(generalDirWatcher.getDir())));
+      // set hook for clearing watcher's dir on JVM shutdown
+      Runtime.getRuntime().addShutdownHook(new Thread(() -> FileUtils.deleteQuietly(generalDirWatcher.getDir())));
+    } else {
+      HIVE_TEST_FIXTURE = null;
+    }
   }
 
   @BeforeClass
   public static void setUp() {
+    HiveTestUtilities.assumeJavaVersion();
     HIVE_TEST_FIXTURE.getPluginManager().addHivePluginTo(bits);
   }
 
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestUtilities.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestUtilities.java
index ead1479..8518fca 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestUtilities.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestUtilities.java
@@ -30,9 +30,12 @@ import org.apache.drill.test.TestTools;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.junit.AssumptionViolatedException;
 
 import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.CoreMatchers.startsWith;
 import static org.junit.Assert.assertThat;
+import static org.junit.Assume.assumeThat;
 
 public class HiveTestUtilities {
 
@@ -128,4 +131,23 @@ public class HiveTestUtilities {
     assertThat(plan, containsString("HiveDrillNativeParquetScan"));
   }
 
+  /**
+   * Current Hive version doesn't support JDK 9+.
+   * Checks if current version is supported by Hive.
+   *
+   * @return {@code true} if current version is supported by Hive, {@code false} otherwise
+   */
+  public static boolean supportedJavaVersion() {
+    return System.getProperty("java.version").startsWith("1.8");
+  }
+
+  /**
+   * Checks if current version is supported by Hive.
+   *
+   * @throws AssumptionViolatedException if current version is not supported by Hive,
+   * so unit tests may be skipped.
+   */
+  public static void assumeJavaVersion() throws AssumptionViolatedException {
+    assumeThat("Skipping tests since Hive supports only JDK 8.", System.getProperty("java.version"), startsWith("1.8"));
+  }
 }
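
The startsWith("1.8") check works because java.version reports "1.8.0_xxx" on JDK 8 but "9",
"11.0.5", "13.0.1", etc. on later releases. A self-contained sketch of the assumption mechanism
(the test class is hypothetical):

    import static org.hamcrest.CoreMatchers.startsWith;
    import static org.junit.Assume.assumeThat;

    import org.junit.Test;

    public class AssumeJavaVersionExample {
      @Test
      public void jdk8OnlyTest() {
        // When the matcher fails, JUnit throws AssumptionViolatedException
        // and reports the test as skipped rather than failed.
        assumeThat("Skipping tests since Hive supports only JDK 8.",
            System.getProperty("java.version"), startsWith("1.8"));
        // JDK 8 specific assertions would go here
      }
    }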
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveArrays.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveArrays.java
index 955b800..9513cbb 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveArrays.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveArrays.java
@@ -28,12 +28,12 @@ import java.util.stream.Stream;
 import org.apache.drill.categories.HiveStorageTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.hive.HiveClusterTest;
 import org.apache.drill.exec.hive.HiveTestFixture;
 import org.apache.drill.exec.hive.HiveTestUtilities;
 import org.apache.drill.exec.util.StoragePluginTestUtils;
 import org.apache.drill.exec.util.Text;
 import org.apache.drill.test.ClusterFixture;
-import org.apache.drill.test.ClusterTest;
 import org.apache.drill.test.TestBuilder;
 import org.apache.hadoop.hive.ql.Driver;
 import org.junit.AfterClass;
@@ -50,7 +50,7 @@ import static org.apache.drill.test.TestBuilder.listOf;
 import static org.apache.drill.test.TestBuilder.mapOfObject;
 
 @Category({SlowTest.class, HiveStorageTest.class})
-public class TestHiveArrays extends ClusterTest {
+public class TestHiveArrays extends HiveClusterTest {
 
   private static HiveTestFixture hiveTestFixture;
 
@@ -67,7 +67,7 @@ public class TestHiveArrays extends ClusterTest {
   }
 
   @AfterClass
-  public static void tearDown() throws Exception {
+  public static void tearDown() {
     if (hiveTestFixture != null) {
       hiveTestFixture.getPluginManager().removeHivePluginFrom(cluster.drillbit());
     }
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveMaps.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveMaps.java
index 8ee7eaa..4f2118e 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveMaps.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveMaps.java
@@ -24,11 +24,11 @@ import java.nio.file.Paths;
 import org.apache.drill.categories.HiveStorageTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.hive.HiveClusterTest;
 import org.apache.drill.exec.hive.HiveTestFixture;
 import org.apache.drill.exec.hive.HiveTestUtilities;
 import org.apache.drill.exec.util.StoragePluginTestUtils;
 import org.apache.drill.test.ClusterFixture;
-import org.apache.drill.test.ClusterTest;
 import org.apache.hadoop.hive.ql.Driver;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -45,7 +45,7 @@ import static org.apache.drill.test.TestBuilder.mapOf;
 import static org.apache.drill.test.TestBuilder.mapOfObject;
 
 @Category({SlowTest.class, HiveStorageTest.class})
-public class TestHiveMaps extends ClusterTest {
+public class TestHiveMaps extends HiveClusterTest {
 
   private static HiveTestFixture hiveTestFixture;
 
@@ -59,7 +59,7 @@ public class TestHiveMaps extends ClusterTest {
   }
 
   @AfterClass
-  public static void tearDown() throws Exception {
+  public static void tearDown() {
     if (hiveTestFixture != null) {
       hiveTestFixture.getPluginManager().removeHivePluginFrom(cluster.drillbit());
     }
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveStructs.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveStructs.java
index 106ab17..4b0750d 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveStructs.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveStructs.java
@@ -23,13 +23,13 @@ import java.nio.file.Paths;
 import org.apache.drill.categories.HiveStorageTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.hive.HiveClusterTest;
 import org.apache.drill.exec.hive.HiveTestFixture;
 import org.apache.drill.exec.hive.HiveTestUtilities;
 import org.apache.drill.exec.util.JsonStringHashMap;
 import org.apache.drill.exec.util.StoragePluginTestUtils;
 import org.apache.drill.exec.util.Text;
 import org.apache.drill.test.ClusterFixture;
-import org.apache.drill.test.ClusterTest;
 import org.apache.hadoop.hive.ql.Driver;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -44,7 +44,7 @@ import static org.apache.drill.test.TestBuilder.mapOf;
 import static org.apache.drill.test.TestBuilder.mapOfObject;
 
 @Category({SlowTest.class, HiveStorageTest.class})
-public class TestHiveStructs extends ClusterTest {
+public class TestHiveStructs extends HiveClusterTest {
 
   private static final JsonStringHashMap<String, Object> STR_N0_ROW_1 = mapOf(
       "f_int", -3000, "f_string", new Text("AbbBBa"), "f_varchar", new 
Text("-c54g"), "f_char", new Text("Th"),
@@ -88,7 +88,7 @@ public class TestHiveStructs extends ClusterTest {
   }
 
   @AfterClass
-  public static void tearDown() throws Exception {
+  public static void tearDown() {
     if (hiveTestFixture != null) {
       hiveTestFixture.getPluginManager().removeHivePluginFrom(cluster.drillbit());
     }
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveUnions.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveUnions.java
index 3dc09c9..3a02a1e 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveUnions.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/complex_types/TestHiveUnions.java
@@ -24,10 +24,10 @@ import org.apache.drill.categories.HiveStorageTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.expr.fn.impl.DateUtility;
+import org.apache.drill.exec.hive.HiveClusterTest;
 import org.apache.drill.exec.hive.HiveTestFixture;
 import org.apache.drill.exec.hive.HiveTestUtilities;
 import org.apache.drill.test.ClusterFixture;
-import org.apache.drill.test.ClusterTest;
 import org.apache.hadoop.hive.ql.Driver;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -39,7 +39,7 @@ import static org.apache.drill.test.TestBuilder.mapOf;
 import static org.apache.drill.test.TestBuilder.mapOfObject;
 
 @Category({SlowTest.class, HiveStorageTest.class})
-public class TestHiveUnions extends ClusterTest {
+public class TestHiveUnions extends HiveClusterTest {
 
   private static HiveTestFixture hiveTestFixture;
 
@@ -54,7 +54,7 @@ public class TestHiveUnions extends ClusterTest {
   }
 
   @AfterClass
-  public static void tearDown() throws Exception {
+  public static void tearDown() {
     if (hiveTestFixture != null) {
       hiveTestFixture.getPluginManager().removeHivePluginFrom(cluster.drillbit());
     }
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java
index 39f8655..422d44a 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java
@@ -23,6 +23,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.calcite.schema.Schema.TableType;
+import org.apache.drill.exec.hive.HiveTestUtilities;
 import org.apache.drill.exec.impersonation.BaseTestImpersonation;
 import org.apache.drill.exec.store.hive.HiveStoragePluginConfig;
 import org.apache.drill.test.TestBuilder;
@@ -34,6 +35,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.junit.BeforeClass;
 
 import static org.apache.drill.exec.hive.HiveTestUtilities.createDirWithPosixPermissions;
 import static org.apache.drill.exec.hive.HiveTestUtilities.executeQuery;
@@ -62,6 +64,11 @@ public class BaseTestHiveImpersonation extends BaseTestImpersonation {
       "(rownum INT, name STRING, gpa FLOAT, studentnum BIGINT) " +
       "partitioned by (age INT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE";
 
+  @BeforeClass
+  public static void setUp() {
+    HiveTestUtilities.assumeJavaVersion();
+  }
+
   protected static void prepHiveConfAndData() throws Exception {
     hiveConf = new HiveConf();
 
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/sql/hive/TestViewSupportOnHiveTables.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/sql/hive/TestViewSupportOnHiveTables.java
index 5b53113..52ef567 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/sql/hive/TestViewSupportOnHiveTables.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/sql/hive/TestViewSupportOnHiveTables.java
@@ -21,6 +21,7 @@ import java.util.Objects;
 
 import org.apache.drill.categories.HiveStorageTest;
 import org.apache.drill.categories.SlowTest;
+import org.apache.drill.exec.hive.HiveTestUtilities;
 import org.apache.drill.exec.sql.TestBaseViewSupport;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -34,14 +35,15 @@ import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_TMP_SCHEMA;
 public class TestViewSupportOnHiveTables extends TestBaseViewSupport {
 
   @BeforeClass
-  public static void setUp() throws Exception {
+  public static void setUp() {
+    HiveTestUtilities.assumeJavaVersion();
+    Objects.requireNonNull(HIVE_TEST_FIXTURE, "Failed to configure Hive storage plugin, " +
         "because HiveTestBase.HIVE_TEST_FIXTURE isn't initialized!")
         .getPluginManager().addHivePluginTo(bits);
   }
 
   @AfterClass
-  public static void tearDown() throws Exception {
+  public static void tearDown() {
     if (HIVE_TEST_FIXTURE != null) {
       HIVE_TEST_FIXTURE.getPluginManager().removeHivePluginFrom(bits);
     }
diff --git a/contrib/storage-mongo/pom.xml b/contrib/storage-mongo/pom.xml
index a58ed57..cf5b98d 100644
--- a/contrib/storage-mongo/pom.xml
+++ b/contrib/storage-mongo/pom.xml
@@ -66,7 +66,7 @@
     <dependency>
       <groupId>de.flapdoodle.embed</groupId>
       <artifactId>de.flapdoodle.embed.mongo</artifactId>
-      <version>2.0.3</version>
+      <version>2.2.0</version>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java
index f5cef38..c0d91a1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/compile/MergeAdapter.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.compile;
 
 import java.lang.reflect.Modifier;
 import java.util.Collection;
-import java.util.Iterator;
+import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.drill.exec.compile.ClassTransformer.ClassSet;
@@ -30,15 +30,15 @@ import org.objectweb.asm.ClassVisitor;
 import org.objectweb.asm.ClassWriter;
 import org.objectweb.asm.FieldVisitor;
 import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.commons.ClassRemapper;
+import org.objectweb.asm.commons.MethodRemapper;
 import org.objectweb.asm.commons.Remapper;
-import org.objectweb.asm.commons.RemappingClassAdapter;
-import org.objectweb.asm.commons.RemappingMethodAdapter;
 import org.objectweb.asm.commons.SimpleRemapper;
 import org.objectweb.asm.tree.ClassNode;
-import org.objectweb.asm.tree.FieldNode;
 import org.objectweb.asm.tree.MethodNode;
 
-import org.apache.drill.shaded.guava.com.google.common.collect.Sets;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
 * Serves two purposes. Renames all inner classes references to the outer class to the new name. Also adds all the
@@ -46,10 +46,10 @@ import org.apache.drill.shaded.guava.com.google.common.collect.Sets;
  */
 @SuppressWarnings("unused")
 class MergeAdapter extends ClassVisitor {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(MergeAdapter.class);
+  private static final Logger logger = LoggerFactory.getLogger(MergeAdapter.class);
   private final ClassNode classToMerge;
   private final ClassSet set;
-  private final Set<String> mergingNames = Sets.newHashSet();
+  private final Set<String> mergingNames = new HashSet<>();
   private final boolean hasInit;
   private String name;
 
@@ -62,8 +62,8 @@ class MergeAdapter extends ClassVisitor {
     this.set = set;
 
     boolean hasInit = false;
-    for (Object o  : classToMerge.methods) {
-      String name = ((MethodNode)o).name;
+    for (MethodNode methodNode : classToMerge.methods) {
+      String name = methodNode.name;
       if (name.equals("<init>")) {
         continue;
       }
@@ -78,18 +78,12 @@ class MergeAdapter extends ClassVisitor {
 
   @Override
  public void visitInnerClass(String name, String outerName, String innerName, int access) {
-    // logger.debug(String.format("[Inner Class] Name: %s, outerName: %s, innerName: %s, templateName: %s, newName: %s.",
-    // name, outerName, innerName, templateName, newName));
-
     if (name.startsWith(set.precompiled.slash)) {
-//      outerName = outerName.replace(precompiled.slash, generated.slash);
       name = name.replace(set.precompiled.slash, set.generated.slash);
       int i = name.lastIndexOf('$');
       outerName = name.substring(0, i);
-      super.visitInnerClass(name, outerName, innerName, access);
-    } else {
-      super.visitInnerClass(name, outerName, innerName, access);
     }
+    super.visitInnerClass(name, outerName, innerName, access);
   }
 
   // visit the class
@@ -97,13 +91,10 @@ class MergeAdapter extends ClassVisitor {
  public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
     // use the access and names of the impl class.
     this.name = name;
-    if (name.contains("$")) {
-      super.visit(version, access, name, signature, superName, interfaces);
-    } else {
-      super.visit(version, access ^ Modifier.ABSTRACT | Modifier.FINAL, name, signature, superName, interfaces);
+    if (!name.contains("$")) {
+      access = access ^ Modifier.ABSTRACT | Modifier.FINAL;
     }
-
-//    this.cname = name;
+    super.visit(version, access, name, signature, superName, interfaces);
   }
 
   @Override
@@ -113,23 +104,15 @@ class MergeAdapter extends ClassVisitor {
 
     // skip all abstract methods as they should have implementations.
     if ((access & Modifier.ABSTRACT) != 0 || mergingNames.contains(name)) {
-
-//      logger.debug("Skipping copy of '{}()' since it is abstract or listed elsewhere.", arg1);
       return null;
     }
     if (signature != null) {
      signature = signature.replace(set.precompiled.slash, set.generated.slash);
     }
-    // if ((access & Modifier.PUBLIC) == 0) {
-    // access = access ^ Modifier.PUBLIC ^ Modifier.PROTECTED | Modifier.PRIVATE;
-    // }
+
    MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
-    if (!name.equals("<init>")) {
-      access = access | Modifier.FINAL;
-    } else {
-      if (hasInit) {
-        return new DrillInitMethodVisitor(this.name, mv);
-      }
+    if (name.equals("<init>") && hasInit) {
+      return new DrillInitMethodVisitor(this.name, mv);
     }
     return mv;
   }
@@ -137,28 +120,21 @@ class MergeAdapter extends ClassVisitor {
   @Override
   public void visitEnd() {
     // add all the fields of the class we're going to merge.
-    for (Iterator<?> it = classToMerge.fields.iterator(); it.hasNext();) {
-
-      // Special handling for nested classes. Drill uses non-static nested
-      // "inner" classes in some templates. Prior versions of Drill would
-      // create the generated nested classes as static, then this line
-      // would copy the "this$0" field to convert the static nested class
-      // into a non-static inner class. However, that approach is not
-      // compatible with plain-old Java compilation. Now, Drill generates
-      // the nested classes as non-static inner classes. As a result, we
-      // do not want to copy the hidden fields; we'll end up with two if
-      // we do.
-
-      FieldNode field = (FieldNode) it.next();
-      if (! field.name.startsWith("this$")) {
-        field.accept(this);
-      }
-    }
+    // Special handling for nested classes. Drill uses non-static nested
+    // "inner" classes in some templates. Prior versions of Drill would
+    // create the generated nested classes as static, then this line
+    // would copy the "this$0" field to convert the static nested class
+    // into a non-static inner class. However, that approach is not
+    // compatible with plain-old Java compilation. Now, Drill generates
+    // the nested classes as non-static inner classes. As a result, we
+    // do not want to copy the hidden fields; we'll end up with two if
+    // we do.
+    classToMerge.fields.stream()
+        .filter(field -> !field.name.startsWith("this$"))
+        .forEach(field -> field.accept(this));
 
     // add all the methods that we to include.
-    for (Iterator<?> it = classToMerge.methods.iterator(); it.hasNext();) {
-      MethodNode mn = (MethodNode) it.next();
-
+    for (MethodNode mn : classToMerge.methods) {
       if (mn.name.equals("<init>")) {
         continue;
       }
@@ -178,7 +154,7 @@ class MergeAdapter extends ClassVisitor {
       while (top.parent != null) {
         top = top.parent;
       }
-      mn.accept(new RemappingMethodAdapter(mn.access, mn.desc, mv,
+      mn.accept(new MethodRemapper(mv,
           new SimpleRemapper(top.precompiled.slash, top.generated.slash)));
 
     }
@@ -250,7 +226,7 @@ class MergeAdapter extends ClassVisitor {
        writerVisitor = new DrillCheckClassAdapter(CompilationConfig.ASM_API_VERSION,
            new CheckClassVisitorFsm(CompilationConfig.ASM_API_VERSION, writerVisitor), true);
       }
-      ClassVisitor remappingAdapter = new RemappingClassAdapter(writerVisitor, re);
+      ClassVisitor remappingAdapter = new ClassRemapper(writerVisitor, re);
       if (verifyBytecode) {
        remappingAdapter = new DrillCheckClassAdapter(CompilationConfig.ASM_API_VERSION,
            new CheckClassVisitorFsm(CompilationConfig.ASM_API_VERSION, remappingAdapter), true);
@@ -285,11 +261,11 @@ class MergeAdapter extends ClassVisitor {
   }
 
   private static class RemapClasses extends Remapper {
-    final Set<String> innerClasses = Sets.newHashSet();
-    ClassSet top;
-    ClassSet current;
+    private final Set<String> innerClasses = new HashSet<>();
+    private final ClassSet top;
+    private final ClassSet current;
 
-    public RemapClasses(final ClassSet set) {
+    public RemapClasses(ClassSet set) {
       current = set;
       ClassSet top = set;
       while (top.parent != null) {
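
RemappingClassAdapter and RemappingMethodAdapter were deprecated and later dropped from ASM, so the
ASM upgrade needed for JDK 12/13 bytecode swaps in ClassRemapper and MethodRemapper. A minimal
standalone sketch of the same renaming idea (class names are illustrative):

    import org.objectweb.asm.ClassReader;
    import org.objectweb.asm.ClassWriter;
    import org.objectweb.asm.commons.ClassRemapper;
    import org.objectweb.asm.commons.SimpleRemapper;

    public class RemapSketch {
      // Rewrites every reference to the internal name "com/example/Precompiled"
      // into "com/example/Generated" and returns the transformed bytecode.
      static byte[] rename(byte[] classBytes) {
        ClassReader reader = new ClassReader(classBytes);
        ClassWriter writer = new ClassWriter(reader, 0);
        reader.accept(new ClassRemapper(writer,
            new SimpleRemapper("com/example/Precompiled", "com/example/Generated")), 0);
        return writer.toByteArray();
      }
    }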
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/metadata/MetadataAggregateHelper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/metadata/MetadataAggregateHelper.java
index 1cca788..6f00dea 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/metadata/MetadataAggregateHelper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/metadata/MetadataAggregateHelper.java
@@ -182,7 +182,8 @@ public class MetadataAggregateHelper {
   private void addLastModifiedCall() {
     String lastModifiedColumn = columnNamesOptions.lastModifiedTime();
     LogicalExpression lastModifiedTime;
-    if (createNewAggregations()) {
+    // it is enough to call any_value(`lmt`) for file metadata level or more specific metadata
+    if (context.metadataLevel().includes(MetadataType.FILE)) {
       lastModifiedTime = new FunctionCall("any_value",
           Collections.singletonList(
               FieldReference.getWithQuotedRef(lastModifiedColumn)),
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
index 2c657e6..5134f42 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
@@ -176,6 +176,10 @@ public class BatchValidator {
   }
 
   public static boolean validate(RecordBatch batch) {
+    // This is a handy place to trace batches as they flow up
+    // the DAG. Works best for single-threaded runs with a few records.
+    // System.out.println(batch.getClass().getSimpleName());
+    // RowSetFormatter.print(batch);
     ErrorReporter reporter = errorReporter(batch);
     int rowCount = batch.getRecordCount();
     int valueCount = rowCount;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestDrillSpnegoAuthenticator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestDrillSpnegoAuthenticator.java
index efa1974..1059e46 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestDrillSpnegoAuthenticator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/spnego/TestDrillSpnegoAuthenticator.java
@@ -133,7 +133,6 @@ public class TestDrillSpnegoAuthenticator extends BaseTest {
   /**
   * Test to verify response when request is sent for {@link WebServerConstants#SPENGO_LOGIN_RESOURCE_PATH} from
   * unauthenticated session. Expectation is client will receive response with Negotiate header.
-   * @throws Exception
    */
   @Test
   public void testNewSessionReqForSpnegoLogin() throws Exception {
@@ -154,7 +153,6 @@ public class TestDrillSpnegoAuthenticator extends BaseTest {
   /**
   * Test to verify response when request is sent for {@link WebServerConstants#SPENGO_LOGIN_RESOURCE_PATH} from
   * authenticated session. Expectation is server will find the authenticated UserIdentity.
-   * @throws Exception
    */
   @Test
   public void testAuthClientRequestForSpnegoLoginResource() throws Exception {
@@ -179,7 +177,6 @@ public class TestDrillSpnegoAuthenticator extends BaseTest {
   * Test to verify response when request is sent for any other resource other than
   * {@link WebServerConstants#SPENGO_LOGIN_RESOURCE_PATH} from authenticated session. Expectation is server will
   * find the authenticated UserIdentity and will not perform the authentication again for new resource.
-   * @throws Exception
    */
   @Test
   public void testAuthClientRequestForOtherPage() throws Exception {
@@ -203,8 +200,7 @@ public class TestDrillSpnegoAuthenticator extends BaseTest {
   /**
   * Test to verify that when request is sent for {@link WebServerConstants#LOGOUT_RESOURCE_PATH} then the UserIdentity
   * will be removed from the session and returned authentication will be null from
-   * {@link DrillSpnegoAuthenticator#validateRequest(ServletRequest, ServletResponse, boolean)}
-   * @throws Exception
+   * {@link DrillSpnegoAuthenticator#validateRequest(javax.servlet.ServletRequest, javax.servlet.ServletResponse, boolean)}
    */
   @Test
   public void testAuthClientRequestForLogOut() throws Exception {
@@ -228,7 +224,6 @@ public class TestDrillSpnegoAuthenticator extends BaseTest {
   /**
   * Test to verify authentication fails when client sends invalid SPNEGO token for the
    * {@link WebServerConstants#SPENGO_LOGIN_RESOURCE_PATH} resource.
-   * @throws Exception
    */
   @Test
   public void testSpnegoLoginInvalidToken() throws Exception {
@@ -242,28 +237,25 @@ public class TestDrillSpnegoAuthenticator extends BaseTest {
         spnegoHelper.clientKeytab.getAbsoluteFile());
 
     // Generate a SPNEGO token for the peer SERVER_PRINCIPAL from this CLIENT_PRINCIPAL
-    final String token = Subject.doAs(clientSubject, new PrivilegedExceptionAction<String>() {
-      @Override
-      public String run() throws Exception {
-
-        final GSSManager gssManager = GSSManager.getInstance();
-        GSSContext gssContext = null;
-        try {
-          final Oid oid = GSSUtil.GSS_SPNEGO_MECH_OID;
-          final GSSName serviceName = gssManager.createName(spnegoHelper.SERVER_PRINCIPAL, GSSName.NT_USER_NAME, oid);
-
-          gssContext = gssManager.createContext(serviceName, oid, null, GSSContext.DEFAULT_LIFETIME);
-          gssContext.requestCredDeleg(true);
-          gssContext.requestMutualAuth(true);
-
-          byte[] outToken = new byte[0];
-          outToken = gssContext.initSecContext(outToken, 0, outToken.length);
-          return Base64.encodeBase64String(outToken);
-
-        } finally {
-          if (gssContext != null) {
-            gssContext.dispose();
-          }
+    final String token = Subject.doAs(clientSubject, (PrivilegedExceptionAction<String>) () -> {
+
+      final GSSManager gssManager = GSSManager.getInstance();
+      GSSContext gssContext = null;
+      try {
+        final Oid oid = GSSUtil.GSS_SPNEGO_MECH_OID;
+        final GSSName serviceName = gssManager.createName(spnegoHelper.SERVER_PRINCIPAL, GSSName.NT_USER_NAME, oid);
+
+        gssContext = gssManager.createContext(serviceName, oid, null, GSSContext.DEFAULT_LIFETIME);
+        gssContext.requestCredDeleg(true);
+        gssContext.requestMutualAuth(true);
+
+        byte[] outToken = new byte[0];
+        outToken = gssContext.initSecContext(outToken, 0, outToken.length);
+        return Base64.encodeBase64String(outToken);
+
+      } finally {
+        if (gssContext != null) {
+          gssContext.dispose();
         }
       }
     });
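
The explicit cast in the lambda version is required because Subject.doAs is overloaded for both
PrivilegedAction and PrivilegedExceptionAction; a minimal sketch of the pattern:

    import java.security.PrivilegedExceptionAction;
    import javax.security.auth.Subject;

    public class DoAsSketch {
      static String run(Subject subject) throws Exception {
        // The cast selects the PrivilegedExceptionAction overload; without it
        // the lambda is ambiguous between the two doAs variants.
        return Subject.doAs(subject, (PrivilegedExceptionAction<String>) () -> "token");
      }
    }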
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchemaWithMetastore.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchemaWithMetastore.java
index c89472b..28d6a6a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchemaWithMetastore.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchemaWithMetastore.java
@@ -395,7 +395,10 @@ public class TestInfoSchemaWithMetastore extends ClusterTest {
   }
 
   private ZonedDateTime currentUtcTime() {
-    ZonedDateTime currentTime = ZonedDateTime.of(LocalDateTime.now(), ZoneId.systemDefault());
+    // Java 9 and later returns LocalDateTime with nanoseconds precision,
+    // but Java 8 returns LocalDateTime with milliseconds precision
+    // and metastore stores last modified time in milliseconds
+    ZonedDateTime currentTime = ZonedDateTime.of(LocalDateTime.now().withNano(0), ZoneId.systemDefault());
     return currentTime.withZoneSameInstant(ZoneId.of("UTC"));
   }
 }
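
A quick illustration of the precision difference behind the withNano(0) fix (plain java.time, no
other assumptions):

    import java.time.LocalDateTime;

    public class ClockPrecisionSketch {
      public static void main(String[] args) {
        LocalDateTime now = LocalDateTime.now();
        // JDK 8: getNano() is always a whole number of milliseconds (e.g. 123000000).
        // JDK 9+: the system clock can add micro/nanosecond digits, so a value stored
        // in milliseconds no longer compares equal to a freshly read timestamp.
        System.out.println(now.getNano());
        System.out.println(now.withNano(0)); // normalized form used by the test
      }
    }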
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestMetastoreCommands.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestMetastoreCommands.java
index b8bf889..ccee4ed 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestMetastoreCommands.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestMetastoreCommands.java
@@ -530,7 +530,8 @@ public class TestMetastoreCommands extends ClusterTest {
             .basicRequests()
             .tableMetadata(tableInfo);
 
-        assertEquals(expectedTableMetadata, actualTableMetadata);
+        assertEquals(String.format("Table metadata mismatch for [%s] metadata level", analyzeLevel),
+            expectedTableMetadata, actualTableMetadata);
       } finally {
         run("analyze table dfs.tmp.`%s` drop metadata if exists", tableName);
       }
diff --git a/exec/memory/base/src/test/java/org/apache/drill/exec/memory/BoundsCheckingTest.java b/exec/memory/base/src/test/java/org/apache/drill/exec/memory/BoundsCheckingTest.java
index 3b5c7ee..dff8555 100644
--- a/exec/memory/base/src/test/java/org/apache/drill/exec/memory/BoundsCheckingTest.java
+++ b/exec/memory/base/src/test/java/org/apache/drill/exec/memory/BoundsCheckingTest.java
@@ -17,9 +17,6 @@
  */
 package org.apache.drill.exec.memory;
 
-import java.lang.reflect.Field;
-import java.lang.reflect.Modifier;
-
 import org.apache.drill.test.BaseTest;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -29,55 +26,47 @@ import org.junit.Test;
 
 import io.netty.buffer.DrillBuf;
 import io.netty.util.IllegalReferenceCountException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 public class BoundsCheckingTest extends BaseTest {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BoundsCheckingTest.class);
+  private static final Logger logger = LoggerFactory.getLogger(BoundsCheckingTest.class);
 
   private static boolean old;
 
   private RootAllocator allocator;
 
-  private static boolean setBoundsChecking(boolean enabled) throws Exception
-  {
-    Field field = BoundsChecking.class.getDeclaredField("BOUNDS_CHECKING_ENABLED");
-    Field modifiersField = Field.class.getDeclaredField("modifiers");
-    modifiersField.setAccessible(true);
-    modifiersField.setInt(field, field.getModifiers() & ~Modifier.FINAL);
-    boolean old = field.getBoolean(null);
-    field.setAccessible(true);
-    field.set(null, enabled);
-    return old;
+  private static boolean setBoundsChecking(boolean enabled) {
+    String oldValue = System.setProperty(BoundsChecking.ENABLE_UNSAFE_BOUNDS_CHECK_PROPERTY, String.valueOf(enabled));
+    return Boolean.parseBoolean(oldValue);
   }
 
   @BeforeClass
-  public static void setBoundsCheckingEnabled() throws Exception
-  {
+  public static void setBoundsCheckingEnabled() {
     old = setBoundsChecking(true);
   }
 
   @AfterClass
-  public static void restoreBoundsChecking() throws Exception
-  {
+  public static void restoreBoundsChecking() {
     setBoundsChecking(old);
   }
 
   @Before
-  public void setupAllocator()
-  {
+  public void setupAllocator() {
     allocator = new RootAllocator(Integer.MAX_VALUE);
   }
 
   @After
-  public void closeAllocator()
-  {
+  public void closeAllocator() {
     allocator.close();
   }
 
   @Test
-  public void testLengthCheck()
-  {
+  public void testLengthCheck() {
+    assertTrue(BoundsChecking.BOUNDS_CHECKING_ENABLED);
     try {
       BoundsChecking.lengthCheck(null, 0, 0);
       fail("expecting NullPointerException");
diff --git a/metastore/iceberg-metastore/pom.xml b/metastore/iceberg-metastore/pom.xml
index 04fc137..9ff2b6e 100644
--- a/metastore/iceberg-metastore/pom.xml
+++ b/metastore/iceberg-metastore/pom.xml
@@ -136,15 +136,9 @@
     </dependency>
 
     <dependency>
-      <groupId>com.sun.codemodel</groupId>
-      <artifactId>codemodel</artifactId>
-      <version>${codemodel.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>net.openhft</groupId>
-      <artifactId>compiler</artifactId>
-      <version>2.3.4</version>
+      <groupId>org.ow2.asm</groupId>
+      <artifactId>asm</artifactId>
+      <version>${asm.version}</version>
       <scope>test</scope>
     </dependency>
   </dependencies>
diff --git a/metastore/iceberg-metastore/src/test/java/org/apache/drill/metastore/iceberg/schema/TestIcebergTableSchema.java b/metastore/iceberg-metastore/src/test/java/org/apache/drill/metastore/iceberg/schema/TestIcebergTableSchema.java
index c95b9a0..3c66ba1 100644
--- a/metastore/iceberg-metastore/src/test/java/org/apache/drill/metastore/iceberg/schema/TestIcebergTableSchema.java
+++ b/metastore/iceberg-metastore/src/test/java/org/apache/drill/metastore/iceberg/schema/TestIcebergTableSchema.java
@@ -17,35 +17,27 @@
  */
 package org.apache.drill.metastore.iceberg.schema;
 
-import com.sun.codemodel.CodeWriter;
-import com.sun.codemodel.JAnnotationArrayMember;
-import com.sun.codemodel.JAnnotationUse;
-import com.sun.codemodel.JClass;
-import com.sun.codemodel.JClassAlreadyExistsException;
-import com.sun.codemodel.JCodeModel;
-import com.sun.codemodel.JDefinedClass;
-import com.sun.codemodel.JFieldVar;
-import com.sun.codemodel.JMod;
-import com.sun.codemodel.JPackage;
-import net.openhft.compiler.CompilerUtils;
 import org.apache.drill.metastore.MetastoreFieldDefinition;
 import org.apache.drill.metastore.iceberg.IcebergBaseTest;
 import org.apache.drill.metastore.iceberg.exceptions.IcebergMetastoreException;
-import org.apache.drill.metastore.metadata.MetadataType;
 import org.apache.iceberg.PartitionSpec;
 import org.apache.iceberg.Schema;
 import org.apache.iceberg.types.Types;
 import org.junit.Test;
+import org.objectweb.asm.ClassWriter;
+import org.objectweb.asm.FieldVisitor;
+import org.objectweb.asm.Opcodes;
+import org.objectweb.asm.Type;
+import org.objectweb.asm.signature.SignatureVisitor;
+import org.objectweb.asm.signature.SignatureWriter;
 
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
-import java.util.stream.Stream;
 
+import static org.objectweb.asm.Opcodes.ACC_PUBLIC;
+import static org.objectweb.asm.Opcodes.V1_8;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
@@ -53,40 +45,36 @@ import static org.junit.Assert.assertNull;
 public class TestIcebergTableSchema extends IcebergBaseTest {
 
   @Test
-  public void testAllTypes() throws Exception {
+  public void testAllTypes() {
     Class<?> clazz = new ClassGenerator(getClass().getSimpleName() + "AllTypes") {
 
       @Override
-      void addFields(JDefinedClass jDefinedClass) {
-        JFieldVar stringField = jDefinedClass.field(DEFAULT_FIELD_MODE, String.class, "stringField");
+      void addFields(ClassWriter classWriter) {
+        FieldVisitor stringField = addField(classWriter, Opcodes.ACC_PRIVATE, "stringField", String.class);
         annotate(stringField);
 
-        JFieldVar intField = jDefinedClass.field(DEFAULT_FIELD_MODE, int.class, "intField");
+        FieldVisitor intField = addField(classWriter, Opcodes.ACC_PRIVATE, "intField", int.class);
         annotate(intField);
 
-        JFieldVar integerField = jDefinedClass.field(DEFAULT_FIELD_MODE, Integer.class, "integerField");
+        FieldVisitor integerField = addField(classWriter, Opcodes.ACC_PRIVATE, "integerField", Integer.class);
         annotate(integerField);
 
-        JFieldVar longField = jDefinedClass.field(DEFAULT_FIELD_MODE, Long.class, "longField");
+        FieldVisitor longField = addField(classWriter, Opcodes.ACC_PRIVATE, "longField", Long.class);
         annotate(longField);
 
-        JFieldVar floatField = jDefinedClass.field(DEFAULT_FIELD_MODE, Float.class, "floatField");
+        FieldVisitor floatField = addField(classWriter, Opcodes.ACC_PRIVATE, "floatField", Float.class);
         annotate(floatField);
 
-        JFieldVar doubleField = jDefinedClass.field(DEFAULT_FIELD_MODE, 
Double.class, "doubleField");
+        FieldVisitor doubleField = addField(classWriter, Opcodes.ACC_PRIVATE, 
"doubleField", Double.class);
         annotate(doubleField);
 
-        JFieldVar booleanField = jDefinedClass.field(DEFAULT_FIELD_MODE, 
Boolean.class, "booleanField");
+        FieldVisitor booleanField = addField(classWriter, Opcodes.ACC_PRIVATE, 
"booleanField", Boolean.class);
         annotate(booleanField);
 
-        JCodeModel jCodeModel = jDefinedClass.owner();
-
-        JClass listRef = jCodeModel.ref(List.class).narrow(String.class);
-        JFieldVar listField = jDefinedClass.field(DEFAULT_FIELD_MODE, listRef, 
"listField");
+        FieldVisitor listField = addField(classWriter, Opcodes.ACC_PRIVATE, 
"listField", List.class, String.class);
         annotate(listField);
 
-        JClass mapRef = jCodeModel.ref(Map.class).narrow(String.class, 
Float.class);
-        JFieldVar mapField = jDefinedClass.field(DEFAULT_FIELD_MODE, mapRef, 
"mapField");
+        FieldVisitor mapField = addField(classWriter, Opcodes.ACC_PRIVATE, 
"mapField", Map.class, String.class, Float.class);
         annotate(mapField);
       }
 
@@ -114,15 +102,15 @@ public class TestIcebergTableSchema extends IcebergBaseTest {
   }
 
   @Test
-  public void testIgnoreUnannotatedFields() throws Exception {
+  public void testIgnoreUnannotatedFields() {
     Class<?> clazz = new ClassGenerator(getClass().getSimpleName() + "IgnoreUnannotatedFields") {
 
       @Override
-      void addFields(JDefinedClass jDefinedClass) {
-        JFieldVar stringField = jDefinedClass.field(DEFAULT_FIELD_MODE, String.class, "stringField");
+      void addFields(ClassWriter classWriter) {
+        FieldVisitor stringField = addField(classWriter, Opcodes.ACC_PRIVATE, "stringField", String.class);
         annotate(stringField);
 
-        jDefinedClass.field(DEFAULT_FIELD_MODE, Integer.class, "integerField");
+        addField(classWriter, Opcodes.ACC_PRIVATE, "integerField", Integer.class);
       }
     }.generate();
 
@@ -132,16 +120,28 @@ public class TestIcebergTableSchema extends IcebergBaseTest {
   }
 
   @Test
-  public void testNestedComplexType() throws Exception {
+  public void testNestedComplexType() {
     Class<?> clazz = new ClassGenerator(getClass().getSimpleName() + "NestedComplexType") {
 
       @Override
-      void addFields(JDefinedClass jDefinedClass) {
-        JCodeModel jCodeModel = jDefinedClass.owner();
-
-        JClass nestedListRef = jCodeModel.ref(List.class).narrow(String.class);
-        JClass listRef = jCodeModel.ref(List.class).narrow(nestedListRef);
-        JFieldVar listField = jDefinedClass.field(DEFAULT_FIELD_MODE, listRef, "listField");
+      void addFields(ClassWriter classWriter) {
+        String descriptor = Type.getType(List.class).getDescriptor();
+
+        String signature = FieldSignatureBuilder.builder()
+            .declareType(List.class)
+            .startGeneric()
+                .declareType(List.class)
+                .startGeneric()
+                    .declareType(String.class)
+                    .endType()
+                .endGeneric()
+                .endType()
+            .endGeneric()
+            .endType()
+            .buildSignature();
+
+        FieldVisitor listField =
+            classWriter.visitField(Opcodes.ACC_PRIVATE, "listField", descriptor, signature, null);
         annotate(listField);
       }
     }.generate();
@@ -152,12 +152,12 @@ public class TestIcebergTableSchema extends IcebergBaseTest {
   }
 
   @Test
-  public void testUnpartitionedPartitionSpec() throws Exception {
+  public void testUnpartitionedPartitionSpec() {
     Class<?> clazz = new ClassGenerator(getClass().getSimpleName() + "UnpartitionedPartitionSpec") {
 
       @Override
-      void addFields(JDefinedClass jDefinedClass) {
-        JFieldVar stringField = jDefinedClass.field(DEFAULT_FIELD_MODE, String.class, "stringField");
+      void addFields(ClassWriter classWriter) {
+        FieldVisitor stringField = addField(classWriter, Opcodes.ACC_PRIVATE, "stringField", String.class);
         annotate(stringField);
       }
     }.generate();
@@ -169,24 +169,24 @@ public class TestIcebergTableSchema extends IcebergBaseTest {
   }
 
   @Test
-  public void testPartitionedPartitionSpec() throws Exception {
+  public void testPartitionedPartitionSpec() {
     Class<?> clazz = new ClassGenerator(getClass().getSimpleName() + "PartitionedPartitionSpec") {
 
       @Override
-      void addFields(JDefinedClass jDefinedClass) {
-        JFieldVar partKey1 = jDefinedClass.field(DEFAULT_FIELD_MODE, String.class, "partKey1");
+      void addFields(ClassWriter classWriter) {
+        FieldVisitor partKey1 = addField(classWriter, Opcodes.ACC_PRIVATE, "partKey1", String.class);
         annotate(partKey1);
 
-        JFieldVar partKey2 = jDefinedClass.field(DEFAULT_FIELD_MODE, String.class, "partKey2");
+        FieldVisitor partKey2 = addField(classWriter, Opcodes.ACC_PRIVATE, "partKey2", String.class);
         annotate(partKey2);
 
-        JFieldVar partKey3 = jDefinedClass.field(DEFAULT_FIELD_MODE, String.class, "partKey3");
+        FieldVisitor partKey3 = addField(classWriter, Opcodes.ACC_PRIVATE, "partKey3", String.class);
         annotate(partKey3);
 
-        JFieldVar integerField = jDefinedClass.field(DEFAULT_FIELD_MODE, Integer.class, "integerField");
+        FieldVisitor integerField = addField(classWriter, Opcodes.ACC_PRIVATE, "integerField", Integer.class);
         annotate(integerField);
 
-        JFieldVar booleanField = jDefinedClass.field(DEFAULT_FIELD_MODE, Boolean.class, "booleanField");
+        FieldVisitor booleanField = addField(classWriter, Opcodes.ACC_PRIVATE, "booleanField", Boolean.class);
         annotate(booleanField);
       }
     }.generate();
@@ -216,15 +216,15 @@ public class TestIcebergTableSchema extends IcebergBaseTest {
   }
 
   @Test
-  public void testUnMatchingPartitionSpec() throws Exception {
+  public void testUnMatchingPartitionSpec() {
     Class<?> clazz = new ClassGenerator(getClass().getSimpleName() + "UnMatchingPartitionSpec") {
 
       @Override
-      void addFields(JDefinedClass jDefinedClass) {
-        JFieldVar partKey1 = jDefinedClass.field(DEFAULT_FIELD_MODE, String.class, "partKey1");
+      void addFields(ClassWriter classWriter) {
+        FieldVisitor partKey1 = addField(classWriter, Opcodes.ACC_PRIVATE, "partKey1", String.class);
         annotate(partKey1);
 
-        JFieldVar integerField = jDefinedClass.field(DEFAULT_FIELD_MODE, Integer.class, "integerField");
+        FieldVisitor integerField = addField(classWriter, Opcodes.ACC_PRIVATE, "integerField", Integer.class);
         annotate(integerField);
       }
     }.generate();
@@ -238,9 +238,7 @@ public class TestIcebergTableSchema extends IcebergBaseTest {
    * Generates and loads class at the runtime with specified fields.
    * Fields may or may not be annotated.
    */
-  private abstract class ClassGenerator {
-
-    final int DEFAULT_FIELD_MODE = JMod.PRIVATE;
+  private static abstract class ClassGenerator {
 
     private final String name;
 
@@ -248,53 +246,112 @@ public class TestIcebergTableSchema extends IcebergBaseTest {
       this.name = name;
     }
 
-    Class<?> generate() throws JClassAlreadyExistsException, IOException, ClassNotFoundException {
-      JCodeModel jCodeModel = prepareModel();
-      ByteArrayStreamCodeWriter codeWriter = new ByteArrayStreamCodeWriter();
-      jCodeModel.build(codeWriter);
+    Class<?> generate() {
+      ClassWriter classWriter = generateClass();
 
-      String sourceCode = codeWriter.sourceCode();
-      return CompilerUtils.CACHED_COMPILER.loadFromJava(name, sourceCode);
+      byte[] bytes = classWriter.toByteArray();
+      return new ClassLoader() {
+        public Class<?> injectClass(String name, byte[] classBytes) {
+          return defineClass(name, classBytes, 0, classBytes.length);
+        }
+      }.injectClass(name, bytes);
     }
 
-    private JCodeModel prepareModel() throws JClassAlreadyExistsException {
-      JCodeModel jCodeModel = new JCodeModel();
-      JPackage jPackage = jCodeModel._package("");
-      JDefinedClass jDefinedClass = jPackage._class(name);
-      addFields(jDefinedClass);
-      return jCodeModel;
+    public FieldVisitor addField(ClassWriter classWriter, int access, String fieldName, Class<?> clazz, Class<?>... genericTypes) {
+      String descriptor = Type.getType(clazz).getDescriptor();
+
+      String signature = null;
+
+      if (genericTypes.length > 0) {
+        FieldSignatureBuilder fieldSignatureBuilder = FieldSignatureBuilder.builder()
+            .declareType(clazz)
+            .startGeneric();
+        for (Class<?> genericType : genericTypes) {
+          fieldSignatureBuilder
+              .declareType(genericType)
+              .endType();
+        }
+        signature = fieldSignatureBuilder
+            .endGeneric()
+            .endType()
+            .buildSignature();
+      }
+
+      return classWriter.visitField(access, fieldName, descriptor, signature, null);
     }
 
-    void annotate(JFieldVar field) {
-      annotate(field, MetadataType.ALL);
+    void annotate(FieldVisitor field) {
+      field.visitAnnotation(Type.getType(MetastoreFieldDefinition.class).getDescriptor(), true);
     }
 
-    void annotate(JFieldVar field, MetadataType... scopes) {
-      JAnnotationUse annotate = field.annotate(MetastoreFieldDefinition.class);
-      assert scopes.length != 0;
-      JAnnotationArrayMember scopesParam = annotate.paramArray("scopes");
-      Stream.of(scopes).forEach(scopesParam::param);
+    private ClassWriter generateClass() {
+      ClassWriter classWriter = new ClassWriter(ClassWriter.COMPUTE_FRAMES);
+      classWriter.visit(V1_8, ACC_PUBLIC, name, null, Type.getInternalName(Object.class), null);
+      addFields(classWriter);
+      classWriter.visitEnd();
+
+      return classWriter;
     }
 
-    abstract void addFields(JDefinedClass jDefinedClass);
+    abstract void addFields(ClassWriter classWriter);
+  }
 
-    private class ByteArrayStreamCodeWriter extends CodeWriter {
+  /**
+   * Helper class for constructing field type signature string.
+   * <p>
+   * Example of usage:
+   * <p>
+   * Desired type: {@code List<Map<String, List<Integer>>>}
+   * <pre><code>
+   *         String signature = FieldSignatureBuilder.builder()
+   *           .declareType(List.class)
+   *           .startGeneric()
+   *               .declareType(Map.class)
+   *               .startGeneric()
+   *                   .declareType(String.class)
+   *                   .endType()
+   *                   .declareType(List.class)
+   *                   .startGeneric()
+   *                       .declareType(Integer.class)
+   *                       .endType()
+   *                   .endGeneric()
+   *                   .endType()
+   *               .endGeneric()
+   *               .endType()
+   *           .endGeneric()
+   *           .endType()
+   *           .buildSignature();
+   * </code></pre>
+   */
+  private static class FieldSignatureBuilder {
+    private final SignatureVisitor signatureVisitor = new SignatureWriter();
 
-      private final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    public FieldSignatureBuilder declareType(Class<?> clazz) {
+      signatureVisitor.visitClassType(Type.getInternalName(clazz));
+      return this;
+    }
 
-      @Override
-      public OutputStream openBinary(JPackage pkg, String fileName) {
-        return outputStream;
-      }
+    public FieldSignatureBuilder startGeneric() {
+      signatureVisitor.visitTypeArgument('=');
+      return this;
+    }
 
-      @Override
-      public void close() {
-        // no need to close byte array stream
-      }
+    public FieldSignatureBuilder endGeneric() {
+      signatureVisitor.visitSuperclass();
+      return this;
+    }
 
-      String sourceCode() {
-        return new String(outputStream.toByteArray());
-      }
+    public FieldSignatureBuilder endType() {
+      signatureVisitor.visitEnd();
+      return this;
+    }
+
+    public String buildSignature() {
+      return signatureVisitor.toString();
+    }
+
+    public static FieldSignatureBuilder builder() {
+      return new FieldSignatureBuilder();
     }
   }
 }
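
The FieldSignatureBuilder introduced above is a thin fluent wrapper around ASM's SignatureWriter, which emits the generic signature grammar from JVMS section 4.7.9.1. As a minimal standalone sketch of what it produces (illustration only, not part of this patch; the class name SignatureDemo is invented), the signature for a List<String> field comes out as "Ljava/util/List<Ljava/lang/String;>;":

    import org.objectweb.asm.signature.SignatureWriter;

    public class SignatureDemo {
      public static void main(String[] args) {
        SignatureWriter writer = new SignatureWriter();
        writer.visitClassType("java/util/List");   // open the outer type: List
        writer.visitTypeArgument('=');             // '=' starts an invariant type argument
        writer.visitClassType("java/lang/String"); // the argument itself: String
        writer.visitEnd();                         // close String, appending ';'
        writer.visitEnd();                         // close List, appending '>' and ';'
        System.out.println(writer);                // Ljava/util/List<Ljava/lang/String;>;
      }
    }

SignatureVisitor also accepts '+' and '-' in visitTypeArgument for "? extends" and "? super" bounds, which the builder above does not need.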
diff --git a/pom.xml b/pom.xml
index 4815771..fb6735c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -86,14 +86,14 @@
     <hbase.version>2.2.2</hbase.version>
     <fmpp.version>1.0</fmpp.version>
     <freemarker.version>2.3.28</freemarker.version>
-    <javassist.version>3.25.0-GA</javassist.version>
+    <javassist.version>3.26.0-GA</javassist.version>
     <msgpack.version>0.6.6</msgpack.version>
     <reflections.version>0.9.10</reflections.version>
     <avro.version>1.9.0</avro.version>
     <metrics.version>4.0.2</metrics.version>
     <jetty.version>9.3.25.v20180904</jetty.version>
     <jersey.version>2.25.1</jersey.version>
-    <asm.version>7.0</asm.version>
+    <asm.version>7.2</asm.version>
     <excludedGroups />
     <memoryMb>4096</memoryMb>
     <directMemoryMb>4096</directMemoryMb>
@@ -109,6 +109,7 @@
     <codemodel.version>2.6</codemodel.version>
     <joda.version>2.10.5</joda.version>
     <javax.el.version>3.0.0</javax.el.version>
+    <surefire.version>3.0.0-M4</surefire.version>
   </properties>
 
   <scm>
@@ -553,7 +554,7 @@
                  <version>[${lowestMavenVersion},4)</version>
                 </requireMavenVersion>
                 <requireJavaVersion>
-                  <version>[1.8,12)</version>
+                  <version>[1.8,14)</version>
                 </requireJavaVersion>
               </rules>
             </configuration>
@@ -760,7 +761,7 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-surefire-plugin</artifactId>
-          <version>3.0.0-M3</version>
+          <version>${surefire.version}</version>
           <executions>
             <execution>
               <id>default-test</id>
@@ -774,7 +775,7 @@
             <dependency>
               <groupId>org.apache.maven.surefire</groupId>
               <artifactId>surefire-junit47</artifactId>
-              <version>3.0.0-M3</version>
+              <version>${surefire.version}</version>
             </dependency>
           </dependencies>
           <configuration>
@@ -915,6 +916,14 @@
               </goals>
             </execution>
           </executions>
+          <dependencies>
+            <!-- Specifies asm version which supports JDK 12+ -->
+            <dependency>
+              <groupId>org.ow2.asm</groupId>
+              <artifactId>asm</artifactId>
+              <version>${asm.version}</version>
+            </dependency>
+          </dependencies>
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
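
For context on the asm pin above: ASM rejects class files whose major version it does not recognize (asm 7.0 tops out at Java 11, major version 55, while asm 7.2 also parses Java 12 and 13, majors 56 and 57), so plugins that scan bytecode need the newer version once the build runs on JDK 12+. A small illustrative sketch (invented helper, not part of this patch) that reads the major version straight from a class file header:

    import java.io.DataInputStream;
    import java.io.InputStream;

    public class ClassVersionCheck {
      public static void main(String[] args) throws Exception {
        // Inspect java.lang.Object as shipped with the running JDK:
        // major 52 = Java 8, 55 = Java 11, 56 = Java 12, 57 = Java 13
        try (InputStream in = Object.class.getResourceAsStream("/java/lang/Object.class");
             DataInputStream data = new DataInputStream(in)) {
          data.skipBytes(4);                    // magic number 0xCAFEBABE
          int minor = data.readUnsignedShort(); // minor_version comes first
          int major = data.readUnsignedShort(); // then major_version
          System.out.println("major=" + major + ", minor=" + minor);
        }
      }
    }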
diff --git a/tools/fmpp/pom.xml b/tools/fmpp/pom.xml
index ac12752..327542c 100644
--- a/tools/fmpp/pom.xml
+++ b/tools/fmpp/pom.xml
@@ -82,7 +82,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-plugin-plugin</artifactId>
-        <version>3.4</version>
+        <version>3.6.0</version>
         <configuration>
           <goalPrefix>drill-fmpp</goalPrefix>
         </configuration>
@@ -102,6 +102,14 @@
             <phase>process-classes</phase>
           </execution>
          </executions>
+        <!-- Specifies asm version which supports JDK 9+ -->
+        <dependencies>
+          <dependency>
+            <groupId>org.ow2.asm</groupId>
+            <artifactId>asm</artifactId>
+            <version>${asm.version}</version>
+          </dependency>
+        </dependencies>
        </plugin>
     </plugins>
   </build>
