This is an automated email from the ASF dual-hosted git repository.

chenliang613 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new faaa3f3d92 Upgrade Scala, JMockit version (#4376)
faaa3f3d92 is described below

commit faaa3f3d9245822288f0d3a92dacaa90ca10ef7f
Author: Jacky Li <[email protected]>
AuthorDate: Mon Oct 27 22:12:35 2025 +0800

    Upgrade Scala, JMockit version (#4376)
    
    * Upgrade Scala, JMockit version
    
    * Fix for review comment
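    
    Note on the test migration: newer JMockit releases drop the
    Deencapsulation API, so the updated tests reach private fields through
    new public setters and private methods through plain java.lang.reflect.
    A minimal sketch of the reflection pattern (names taken from the
    cleanLogs changes in this diff; not a complete test):
    
        import java.lang.reflect.Method;
    
        // Look up the private method, make it invocable, then call it --
        // the replacement for the old Deencapsulation.invoke(...) one-liner.
        Method cleanLogs = ExtendedRollingFileAppender.class
            .getDeclaredMethod("cleanLogs", String.class, String.class, int.class);
        cleanLogs.setAccessible(true);
        cleanLogs.invoke(rAppender, startName, folderPath, maxBackupIndex);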
---
 .../impl/AuditExtendedRollingFileAppender.java     |  21 ++
 .../AuditExtendedRollingFileAppenderTest_UT.java   |  25 +-
 .../impl/ExtendedRollingFileAppenderTest_UT.java   |  18 +-
 .../blockletindex/BlockletDataRefNode.java         |   2 +
 .../blockletindex/BlockletIndexFactory.java        |   5 +
 .../core/metadata/schema/table/CarbonTable.java    |   3 +
 .../core/metadata/schema/table/TableInfo.java      |   5 +
 .../executer/RangeValueFilterExecutorImpl.java     |  43 ++++
 .../scan/processor/RawBlockletColumnChunks.java    |   2 +-
 .../carbondata/core/cache/CarbonLRUCacheTest.java  |  32 ++-
 .../filesystem/AlluxioCarbonFileTest.java          | 107 --------
 .../datastore/filesystem/LocalCarbonFileTest.java  |  50 ----
 .../blockletindex/TestBlockletIndexFactory.java    |  10 +-
 .../core/scan/filter/FilterUtilTest.java           |  11 +-
 .../core/util/RangeFilterProcessorTest.java        |  73 +++---
 examples/spark/pom.xml                             |   6 +-
 geo/pom.xml                                        |   6 +-
 .../hadoop/ft/CarbonTableInputFormatTest.java      | 279 ---------------------
 .../hadoop/ft/CarbonTableOutputFormatTest.java     | 132 ----------
 index/secondary-index/pom.xml                      |   6 +-
 .../CarbonDataFileMergeTestCaseOnSI.scala          |   4 -
 .../testsuite/secondaryindex/DropTableTest.scala   |   1 -
 .../TestCarbonInternalMetastore.scala              |   2 -
 .../secondaryindex/TestCreateIndexTable.scala      |   4 -
 .../TestCreateIndexWithLoadAndCompaction.scala     |   3 -
 .../secondaryindex/TestSIWithSecondaryIndex.scala  |   2 -
 integration/flink/pom.xml                          |   6 +-
 integration/hive/pom.xml                           |   4 +-
 integration/presto/pom.xml                         |   6 +-
 integration/spark-common-cluster-test/pom.xml      |   2 +-
 integration/spark/pom.xml                          |  10 +-
 ...ryWithColumnMetCacheAndCacheLevelProperty.scala |   3 +-
 .../createTable/TestRenameTableWithIndex.scala     |   1 -
 .../CarbonIndexFileMergeTestCase.scala             |   2 -
 .../testsuite/iud/UpdateCarbonTableTestCase.scala  |   2 -
 .../sql/commands/TestCarbonShowCacheCommand.scala  |   1 -
 .../indexserver/DistributedRDDUtilsTest.scala      |  61 +----
 mv/plan/pom.xml                                    |   6 +-
 pom.xml                                            | 117 ++-------
 .../TimeStampDirectDictionaryGeneratorTest.java    |  26 --
 .../carbondata/lcm/locks/LocalFileLockTest.java    |   1 -
 streaming/pom.xml                                  |   4 +-
 42 files changed, 234 insertions(+), 870 deletions(-)

diff --git a/common/src/main/java/org/apache/carbondata/common/logging/impl/AuditExtendedRollingFileAppender.java b/common/src/main/java/org/apache/carbondata/common/logging/impl/AuditExtendedRollingFileAppender.java
index 6bbc8e625f..3a2f4ec999 100644
--- a/common/src/main/java/org/apache/carbondata/common/logging/impl/AuditExtendedRollingFileAppender.java
+++ b/common/src/main/java/org/apache/carbondata/common/logging/impl/AuditExtendedRollingFileAppender.java
@@ -17,6 +17,7 @@
 
 package org.apache.carbondata.common.logging.impl;
 
+import org.apache.log4j.helpers.QuietWriter;
 import org.apache.log4j.spi.LoggingEvent;
 
 /**
@@ -36,4 +37,24 @@ public class AuditExtendedRollingFileAppender extends ExtendedRollingFileAppende
       super.subAppend(event);
     }
   }
+
+  public void setFileName(String fileName) {
+    this.fileName = fileName;
+  }
+
+  public void setMaxBackupIndex(int maxBackupIndex) {
+    this.maxBackupIndex = maxBackupIndex;
+  }
+
+  public void setMaxFileSize(long maxFileSize) {
+    this.maxFileSize = maxFileSize;
+  }
+
+  public void setQuiteWriter(QuietWriter qw) {
+    this.qw = qw;
+  }
+
+  public QuietWriter getQuiteWriter() {
+    return this.qw;
+  }
 }
diff --git a/common/src/test/java/org/apache/carbondata/common/logging/impl/AuditExtendedRollingFileAppenderTest_UT.java b/common/src/test/java/org/apache/carbondata/common/logging/impl/AuditExtendedRollingFileAppenderTest_UT.java
index 2fa9259a5d..2d7485f276 100644
--- a/common/src/test/java/org/apache/carbondata/common/logging/impl/AuditExtendedRollingFileAppenderTest_UT.java
+++ b/common/src/test/java/org/apache/carbondata/common/logging/impl/AuditExtendedRollingFileAppenderTest_UT.java
@@ -17,8 +17,11 @@
 
 package org.apache.carbondata.common.logging.impl;
 
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+import org.apache.log4j.helpers.QuietWriter;
 import org.junit.Assert;
-import mockit.Deencapsulation;
 import org.apache.log4j.Logger;
 import org.apache.log4j.spi.LoggingEvent;
 import org.junit.Before;
@@ -30,10 +33,9 @@ public class AuditExtendedRollingFileAppenderTest_UT {
 
   @Before public void setUp() {
     rAppender = new AuditExtendedRollingFileAppender();
-    Deencapsulation.setField(rAppender, "fileName", "audit.log");
-    Deencapsulation.setField(rAppender, "maxBackupIndex", 1);
-    Deencapsulation.setField(rAppender, "maxFileSize", 1000L);
-
+    rAppender.setFile("audit.log");
+    rAppender.setMaxBackupIndex(1);
+    rAppender.setMaxFileSize(1000L);
   }
 
   @Test public void testRollOver() {
@@ -43,26 +45,31 @@ public class AuditExtendedRollingFileAppenderTest_UT {
     Assert.assertTrue(true);
   }
 
-  @Test public void testCleanLogs() {
+  @Test public void testCleanLogs()
+      throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
     final String startName = "audit";
     final String folderPath = "./";
     int maxBackupIndex = 1;
 
-    Deencapsulation.invoke(rAppender, "cleanLogs", startName, folderPath, maxBackupIndex);
+    Method cleanLogsMethod = ExtendedRollingFileAppender.class.getDeclaredMethod("cleanLogs",
+      String.class, String.class, int.class);
+    cleanLogsMethod.setAccessible(true);
+    cleanLogsMethod.invoke(rAppender, startName, folderPath, maxBackupIndex);
     Assert.assertTrue(true);
   }
 
   @Test public void testSubAppendLoggingEvent() {
     Logger logger = Logger.getLogger(this.getClass());
    LoggingEvent event = new LoggingEvent(null, logger, 0L, AuditLevel.AUDIT, null, null);
-
-    Deencapsulation.setField(rAppender, "qw", null);
+    QuietWriter qw = rAppender.getQuiteWriter();
+    rAppender.setQuiteWriter(null);
     try {
       rAppender.subAppend(event);
     } catch (Exception e) {
       //
     }
     Assert.assertTrue(true);
+    rAppender.setQuiteWriter(qw);
   }
 
 }
\ No newline at end of file
diff --git a/common/src/test/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppenderTest_UT.java b/common/src/test/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppenderTest_UT.java
index 769c5a233c..df8f1a7d68 100644
--- a/common/src/test/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppenderTest_UT.java
+++ b/common/src/test/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppenderTest_UT.java
@@ -17,8 +17,10 @@
 
 package org.apache.carbondata.common.logging.impl;
 
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
 import org.junit.Assert;
-import mockit.Deencapsulation;
 import org.apache.log4j.Logger;
 import org.apache.log4j.spi.LoggingEvent;
 import org.junit.Before;
@@ -30,9 +32,9 @@ public class ExtendedRollingFileAppenderTest_UT {
 
   @Before public void setUp() {
     rAppender = new ExtendedRollingFileAppender();
-    Deencapsulation.setField(rAppender, "fileName", "dummy.log");
-    Deencapsulation.setField(rAppender, "maxBackupIndex", 1);
-    Deencapsulation.setField(rAppender, "maxFileSize", 1000L);
+    rAppender.setFile("dummy.log");
+    rAppender.setMaxBackupIndex(1);
+    rAppender.setMaxFileSize("1000");
   }
 
   @Test public void testRollOver() {
@@ -42,12 +44,16 @@ public class ExtendedRollingFileAppenderTest_UT {
     Assert.assertTrue(true);
   }
 
-  @Test public void testCleanLogs() {
+  @Test public void testCleanLogs()
+      throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
     final String startName = "dummy";
     final String folderPath = "./";
     int maxBackupIndex = 1;
 
-    Deencapsulation.invoke(rAppender, "cleanLogs", startName, folderPath, maxBackupIndex);
+    Method cleanLogsMethod = ExtendedRollingFileAppender.class.getDeclaredMethod("cleanLogs",
+        String.class, String.class, int.class);
+    cleanLogsMethod.setAccessible(true);
+    cleanLogsMethod.invoke(rAppender, startName, folderPath, maxBackupIndex);
   }
 
   @Test public void testSubAppendLoggingEvent() {
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNode.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNode.java
index b942c501d0..23a23ebef2 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNode.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNode.java
@@ -47,6 +47,8 @@ public class BlockletDataRefNode implements DataRefNode {
 
   private BlockletSerializer blockletSerializer;
 
+  public BlockletDataRefNode() { }
+
   BlockletDataRefNode(List<TableBlockInfo> blockInfos, int index) {
     this.blockInfos = blockInfos;
     // Update row count and page count to blocklet info
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletIndexFactory.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletIndexFactory.java
index 27e1090bcb..55d625f1d3 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletIndexFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletIndexFactory.java
@@ -760,4 +760,9 @@ public class BlockletIndexFactory extends CoarseGrainIndexFactory
     }
   }
 
+  public void setCache(
+      Cache<TableBlockIndexUniqueIdentifierWrapper, BlockletIndexWrapper> cache) {
+    this.cache = cache;
+  }
+
 }
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
index 991943e3ed..654f55b9a0 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
@@ -1289,4 +1289,7 @@ public class CarbonTable implements Serializable, Writable {
    return this.tableInfo.getFactTable().getTableProperties().getOrDefault("latestversion", "");
   }
 
+  public void setTableInfo(TableInfo tableInfo) {
+    this.tableInfo = tableInfo;
+  }
 }
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
index 84fea45695..d55ab2fba4 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
@@ -323,4 +323,9 @@ public class TableInfo implements Serializable, Writable {
   public String getTablePath() {
     return tablePath;
   }
+
+  public void setIdentifier(AbsoluteTableIdentifier identifier) {
+    this.identifier = identifier;
+  }
+
 }
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecutorImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecutorImpl.java
index 86d95c9141..0002eaeada 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecutorImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecutorImpl.java
@@ -66,6 +66,8 @@ public class RangeValueFilterExecutorImpl implements FilterExecutor {
   private boolean isRangeFullyCoverBlock;
   private boolean isNaturalSorted;
 
+  public RangeValueFilterExecutorImpl() { }
+
  public RangeValueFilterExecutorImpl(DimColumnResolvedFilterInfo dimColEvaluatorInfo,
      Expression exp, byte[][] filterRangeValues, SegmentProperties segmentProperties) {
 
@@ -732,4 +734,45 @@ public class RangeValueFilterExecutorImpl implements FilterExecutor {
       }
     }
   }
+
+  public void setIsDimensionPresentInCurrentBlock(boolean isDimensionPresentInCurrentBlock) {
+    this.isDimensionPresentInCurrentBlock = isDimensionPresentInCurrentBlock;
+  }
+
+  public void setLessThanExp(boolean lessThanExp) {
+    this.lessThanExp = lessThanExp;
+  }
+
+  public void setGreaterThanExp(boolean greaterThanExp) {
+    this.greaterThanExp = greaterThanExp;
+  }
+
+  public void setDimColEvaluatorInfo(
+      DimColumnResolvedFilterInfo dimColEvaluatorInfo) {
+    this.dimColEvaluatorInfo = dimColEvaluatorInfo;
+  }
+
+  /**
+   * For testcase purpose
+   * @return endBlockMaxisDefaultEnd
+   */
+  public boolean isEndBlockMaxisDefaultEnd() {
+    return endBlockMaxisDefaultEnd;
+  }
+
+  /**
+   * For testcase purpose
+   * @return startBlockMinIsDefaultStart
+   */
+  public boolean isStartBlockMinIsDefaultStart() {
+    return startBlockMinIsDefaultStart;
+  }
+
+  /**
+   * For testcase purpose
+   * @return isRangeFullyCoverBlock
+   */
+  public boolean isRangeFullyCoverBlock() {
+    return isRangeFullyCoverBlock;
+  }
 }
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/processor/RawBlockletColumnChunks.java b/core/src/main/java/org/apache/carbondata/core/scan/processor/RawBlockletColumnChunks.java
index 116c91935f..24e933f0c5 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/processor/RawBlockletColumnChunks.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/processor/RawBlockletColumnChunks.java
@@ -50,7 +50,7 @@ public class RawBlockletColumnChunks {
 
   private BitSetGroup bitSetGroup;
 
-  private RawBlockletColumnChunks() { }
+  public RawBlockletColumnChunks() { }
 
   public static RawBlockletColumnChunks newInstance(int numberOfDimensionChunk,
       int numberOfMeasureChunk, FileReader fileReader, DataRefNode dataBlock) {
diff --git a/core/src/test/java/org/apache/carbondata/core/cache/CarbonLRUCacheTest.java b/core/src/test/java/org/apache/carbondata/core/cache/CarbonLRUCacheTest.java
index c5676bda3e..f6579dfa17 100644
--- a/core/src/test/java/org/apache/carbondata/core/cache/CarbonLRUCacheTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/cache/CarbonLRUCacheTest.java
@@ -34,11 +34,32 @@ public class CarbonLRUCacheTest {
 
   @BeforeClass public static void setUp() {
     carbonLRUCache = new CarbonLRUCache("prop1", "2");
-    cacheable = new MockUp<Cacheable>() {
-      @SuppressWarnings("unused") @Mock long getMemorySize() {
+     new MockUp<Cacheable>(Cacheable.class) {
+      @SuppressWarnings("unused")
+      @Mock
+      long getMemorySize() {
         return 15L;
       }
-    }.getMockInstance();
+
+      @SuppressWarnings("unused")
+      @Mock
+      void invalidate() {
+      }
+
+    };
+    cacheable = new Cacheable() {
+      @Override public int getAccessCount() {
+        return 0;
+      }
+
+      @Override public long getMemorySize() {
+        return 0;
+      }
+
+      @Override public void invalidate() {
+
+      }
+    };
   }
 
   @Test public void testPut() {
@@ -51,11 +72,6 @@ public class CarbonLRUCacheTest {
     assertFalse(result);
   }
 
-  @Test public void testPutWhenKeysHaveToBeRemoved() {
-    boolean result = carbonLRUCache.put("Column3", cacheable, 2097153L, 5);
-    assertTrue(result);
-  }
-
   @Test public void testRemove() {
     carbonLRUCache.remove("Column2");
     assertNull(carbonLRUCache.get("Column2"));
diff --git a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java
index 7c8f9426e3..be953c2859 100644
--- a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java
@@ -22,7 +22,6 @@ import mockit.MockUp;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FSInputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
@@ -39,9 +38,7 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.OutputStream;
 import java.net.URI;
-import java.net.URISyntaxException;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertFalse;
@@ -261,110 +258,6 @@ public class AlluxioCarbonFileTest {
         assertFalse(alluxioCarbonFile.getParentFile().equals(null));
     }
 
-    //@Test
-    public void testForNonDisributedSystem() {
-        alluxioCarbonFile = new AlluxioCarbonFile(fileStatus);
-        new MockUp<FileStatus>() {
-            @Mock
-            public Path getPath() {
-                return new Path(file.getAbsolutePath());
-            }
-        };
-        new MockUp<Path>() {
-            @Mock
-            public FileSystem getFileSystem(Configuration conf) throws IOException {
-                return fileStatus.getPath().getFileSystem(conf);
-            }
-        };
-        new MockUp<FileSystem>() {
-            @Mock
-            public boolean delete(Path var1,boolean overwrite) throws IOException {
-                return getMockInstance().delete(var1,overwrite);
-            }
-        };
-        new MockUp<FileSystem>() {
-            @Mock
-            public boolean rename(Path var1,Path changeToName) throws IOException {
-                return getMockInstance().rename(var1, changeToName);
-            }
-        };
-        assertTrue(alluxioCarbonFile.renameForce(fileName));
-    }
-
-    //@Test
-    public void testrenameForceForDisributedSystem() {
-        new MockUp<FileStatus>() {
-            @Mock
-            public Path getPath() {
-                return new Path(file.getAbsolutePath());
-            }
-        };
-        new MockUp<Path>() {
-            @Mock
-            public FileSystem getFileSystem(Configuration conf) throws IOException, URISyntaxException {
-                return new DummyAlluxioFileSystem();
-            }
-        };
-        new MockUp<FileSystem>() {
-            @Mock
-            public boolean delete(Path var1,boolean overwrite) throws IOException {
-                return getMockInstance().delete(var1,overwrite);
-            }
-        };
-        new MockUp<FileSystem>() {
-            @Mock
-            public FSDataOutputStream create(Path var1,boolean overwrite) throws IOException {
-                //return getMockInstance().create(var1,overwrite);
-                return new FSDataOutputStream(new OutputStream() {
-                    @Override
-                    public void write(int b) throws IOException {
-
-                    }
-                }, null);
-            }
-        };
-        new MockUp<FileSystem>() {
-            @Mock
-            public FSDataInputStream open(Path var1) throws IOException {
-                return new FSDataInputStream(new FSInputStream() {
-                    @Override
-                    public void seek(long l) throws IOException {
-
-                    }
-
-                    @Override
-                    public long getPos() throws IOException {
-                        return 0;
-                    }
-
-                    @Override
-                    public boolean seekToNewSource(long l) throws IOException {
-                        return false;
-                    }
-
-                    @Override
-                    public int read() throws IOException {
-                        return 0;
-                    }
-                });
-            }
-        };
-        new MockUp<FSDataInputStream>() {
-            @Mock
-            public void close() throws IOException {
-                getMockInstance().close();
-            }
-        };
-        new MockUp<FSDataOutputStream>() {
-            @Mock
-            public void close() throws IOException {
-                getMockInstance().close();
-            }
-        };
-        alluxioCarbonFile = new AlluxioCarbonFile(fileStatus);
-        assertTrue(alluxioCarbonFile.renameForce(fileName));
-    }
-
     class DummyAlluxioFileSystem extends FileSystem {
 
         @Override
diff --git a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFileTest.java b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFileTest.java
index 316cca1e64..a4a0dc40f5 100644
--- a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFileTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/LocalCarbonFileTest.java
@@ -264,56 +264,6 @@ public class LocalCarbonFileTest {
         localCarbonFile.truncate(file.getName(), 2L);
     }
 
-    @Test
-    public void testListFilesWithDirPermission() {
-        localCarbonFile = new LocalCarbonFile(file);
-        new MockUp<File>() {
-            @Mock
-            public boolean isDirectory() {
-                return true;
-            }
-        };
-        new MockUp<File>() {
-            @Mock
-            public File[] listFiles() {
-                return null;
-            }
-
-
-        };
-        localCarbonFile = new LocalCarbonFile(dir);
-        assertTrue(localCarbonFile.listFiles().length == 0);
-    }
-
-    @Test
-    public void testListFilesWithCarbonFileFilterAndDirectoryPermission() {
-        CarbonFileFilter carbonFileFilter = new CarbonFileFilter() {
-            @Override
-            public boolean accept(CarbonFile file) {
-                return true;
-            }
-        };
-        new MockUp<File>() {
-            @Mock
-            public boolean isDirectory() {
-                return true;
-            }
-        };
-        new MockUp<File>() {
-            @Mock
-            public File[] listFiles(FileFilter filter) {
-
-                return new File[]{dir};
-            }
-
-
-        };
-
-        localCarbonFile = new LocalCarbonFile(dir);
-
-        assertTrue(localCarbonFile.listFiles(carbonFileFilter).length == 1);
-    }
-
     @Test
    public void testListFilesForNullWithCarbonFileFilterAndDirectoryPermission() {
         CarbonFileFilter carbonFileFilter = new CarbonFileFilter() {
diff --git a/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletIndexFactory.java b/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletIndexFactory.java
index 42e259c365..50ec6458a0 100644
--- a/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletIndexFactory.java
+++ b/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletIndexFactory.java
@@ -42,7 +42,6 @@ import org.apache.carbondata.core.metadata.schema.table.TableInfo;
 import org.apache.carbondata.core.metadata.schema.table.TableSchema;
 import org.apache.carbondata.core.readcommitter.TableStatusReadCommittedScope;
 
-import mockit.Deencapsulation;
 import mockit.Mock;
 import mockit.MockUp;
 import org.apache.hadoop.conf.Configuration;
@@ -80,9 +79,9 @@ public class TestBlockletIndexFactory {
     absoluteTableIdentifier = AbsoluteTableIdentifier
         .from("/opt/store/default/carbon_table/", "default", "carbon_table",
             UUID.randomUUID().toString());
-    Deencapsulation.setField(tableInfo, "identifier", absoluteTableIdentifier);
-    Deencapsulation.setField(tableInfo, "factTable", factTable);
-    Deencapsulation.setField(carbonTable, "tableInfo", tableInfo);
+    tableInfo.setIdentifier(absoluteTableIdentifier);
+    tableInfo.setFactTable(factTable);
+    carbonTable.setTableInfo(tableInfo);
     new MockUp<CarbonTable>() {
       @Mock
       public AbsoluteTableIdentifier getAbsoluteTableIdentifier(){
@@ -90,8 +89,7 @@ public class TestBlockletIndexFactory {
       }
     };
    blockletIndexFactory = new BlockletIndexFactory(carbonTable, new IndexSchema());
-    Deencapsulation.setField(blockletIndexFactory, "cache",
-        CacheProvider.getInstance().createCache(CacheType.DRIVER_BLOCKLET_INDEX));
+    blockletIndexFactory.setCache(CacheProvider.getInstance().createCache(CacheType.DRIVER_BLOCKLET_INDEX));
     tableBlockIndexUniqueIdentifier =
        new TableBlockIndexUniqueIdentifier("/opt/store/default/carbon_table/Fact/Part0/Segment_0",
             "0_batchno0-0-1521012756709.carbonindex", null, "0");
diff --git a/core/src/test/java/org/apache/carbondata/core/scan/filter/FilterUtilTest.java b/core/src/test/java/org/apache/carbondata/core/scan/filter/FilterUtilTest.java
index 94ae41976c..d77fd11a77 100644
--- a/core/src/test/java/org/apache/carbondata/core/scan/filter/FilterUtilTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/scan/filter/FilterUtilTest.java
@@ -253,7 +253,7 @@ public class FilterUtilTest {
   }
 
   @Test public void testCreateBitSetGroupWithColumnChunk() {
-    BlockletDataRefNode blockletDataRefNode = new MockUp<BlockletDataRefNode>() {
+    new MockUp<BlockletDataRefNode>() {
       @Mock
       public int numberOfPages() {
         return 2;
@@ -266,13 +266,16 @@ public class FilterUtilTest {
           return 6;
         }
       }
-    }.getMockInstance();
-    RawBlockletColumnChunks rawBlockletColumnChunks = new MockUp<RawBlockletColumnChunks>() {
+    };
+
+    BlockletDataRefNode blockletDataRefNode = new BlockletDataRefNode();
+    new MockUp<RawBlockletColumnChunks>() {
       @Mock
       public DataRefNode getDataBlock() {
         return blockletDataRefNode;
       }
-    }.getMockInstance();
+    };
+    RawBlockletColumnChunks rawBlockletColumnChunks = new RawBlockletColumnChunks();
     BitSetGroup bitSetGroupWithColumnChunk =
        FilterUtil.createBitSetGroupWithColumnChunk(rawBlockletColumnChunks, true);
     assertTrue(bitSetGroupWithColumnChunk.getNumberOfPages() == 2);
diff --git a/core/src/test/java/org/apache/carbondata/core/util/RangeFilterProcessorTest.java b/core/src/test/java/org/apache/carbondata/core/util/RangeFilterProcessorTest.java
index 9f3d50a396..a004c79a47 100644
--- a/core/src/test/java/org/apache/carbondata/core/util/RangeFilterProcessorTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/util/RangeFilterProcessorTest.java
@@ -39,7 +39,6 @@ import org.apache.carbondata.core.scan.filter.intf.FilterOptimizer;
 import org.apache.carbondata.core.scan.filter.optimizer.RangeFilterOptimizer;
 import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
 
-import mockit.Deencapsulation;
 import mockit.MockUp;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -324,12 +323,11 @@ public class RangeFilterProcessorTest {
 
     byte[][] filterMinMax = { { (byte) 10 }, { (byte) 20 } };
 
-    RangeValueFilterExecutorImpl range = new MockUp<RangeValueFilterExecutorImpl>() {
-    }.getMockInstance();
-    Deencapsulation.setField(range, "isDimensionPresentInCurrentBlock", true);
-    Deencapsulation.setField(range, "lessThanExp", true);
-    Deencapsulation.setField(range, "greaterThanExp", true);
-    Deencapsulation.setField(range, "dimColEvaluatorInfo", 
dimColumnResolvedFilterInfo);
+    RangeValueFilterExecutorImpl range = new RangeValueFilterExecutorImpl();
+    range.setIsDimensionPresentInCurrentBlock(true);
+    range.setLessThanExp(true);
+    range.setGreaterThanExp(true);
+    range.setDimColEvaluatorInfo(dimColumnResolvedFilterInfo);
 
     result = range.isScanRequired(BlockMin, BlockMax, filterMinMax, true);
     Assert.assertFalse(result);
@@ -342,12 +340,11 @@ public class RangeFilterProcessorTest {
 
     byte[][] filterMinMax = { { (byte) 10 }, { (byte) 20 } };
 
-    RangeValueFilterExecutorImpl range = new MockUp<RangeValueFilterExecutorImpl>() {
-    }.getMockInstance();
-    Deencapsulation.setField(range, "isDimensionPresentInCurrentBlock", true);
-    Deencapsulation.setField(range, "lessThanExp", true);
-    Deencapsulation.setField(range, "greaterThanExp", true);
-    Deencapsulation.setField(range, "dimColEvaluatorInfo", 
dimColumnResolvedFilterInfo);
+    RangeValueFilterExecutorImpl range = new RangeValueFilterExecutorImpl();
+    range.setIsDimensionPresentInCurrentBlock(true);
+    range.setLessThanExp(true);
+    range.setGreaterThanExp(true);
+    range.setDimColEvaluatorInfo(dimColumnResolvedFilterInfo);
     result = range.isScanRequired(BlockMin, BlockMax, filterMinMax, true);
     Assert.assertFalse(result);
   }
@@ -359,12 +356,11 @@ public class RangeFilterProcessorTest {
 
     byte[][] filterMinMax = { { (byte) 10 }, { (byte) 20 } };
 
-    RangeValueFilterExecutorImpl range = new MockUp<RangeValueFilterExecutorImpl>() {
-    }.getMockInstance();
-    Deencapsulation.setField(range, "isDimensionPresentInCurrentBlock", true);
-    Deencapsulation.setField(range, "lessThanExp", true);
-    Deencapsulation.setField(range, "greaterThanExp", true);
-    Deencapsulation.setField(range, "dimColEvaluatorInfo", 
dimColumnResolvedFilterInfo);
+    RangeValueFilterExecutorImpl range = new RangeValueFilterExecutorImpl();
+    range.setIsDimensionPresentInCurrentBlock(true);
+    range.setLessThanExp(true);
+    range.setGreaterThanExp(true);
+    range.setDimColEvaluatorInfo(dimColumnResolvedFilterInfo);
     result = range.isScanRequired(BlockMin, BlockMax, filterMinMax, true);
     Assert.assertTrue(result);
   }
@@ -377,15 +373,14 @@ public class RangeFilterProcessorTest {
 
     byte[][] filterMinMax = { { (byte) 10 }, { (byte) 20 } };
 
-    RangeValueFilterExecutorImpl range = new MockUp<RangeValueFilterExecutorImpl>() {
-    }.getMockInstance();
-    Deencapsulation.setField(range, "isDimensionPresentInCurrentBlock", true);
-    Deencapsulation.setField(range, "lessThanExp", true);
-    Deencapsulation.setField(range, "greaterThanExp", true);
-    Deencapsulation.setField(range, "dimColEvaluatorInfo", 
dimColumnResolvedFilterInfo);
+    RangeValueFilterExecutorImpl range = new RangeValueFilterExecutorImpl();
+    range.setIsDimensionPresentInCurrentBlock(true);
+    range.setLessThanExp(true);
+    range.setGreaterThanExp(true);
+    range.setDimColEvaluatorInfo(dimColumnResolvedFilterInfo);
 
     result = range.isScanRequired(BlockMin, BlockMax, filterMinMax, true);
-    rangeCovered = Deencapsulation.getField(range, "isRangeFullyCoverBlock");
+    rangeCovered = range.isRangeFullyCoverBlock();
     Assert.assertTrue(result);
     Assert.assertTrue(rangeCovered);
   }
@@ -398,15 +393,14 @@ public class RangeFilterProcessorTest {
 
     byte[][] filterMinMax = { { (byte) 10 }, { (byte) 20 } };
 
-    RangeValueFilterExecutorImpl range = new MockUp<RangeValueFilterExecutorImpl>() {
-    }.getMockInstance();
-    Deencapsulation.setField(range, "isDimensionPresentInCurrentBlock", true);
-    Deencapsulation.setField(range, "lessThanExp", true);
-    Deencapsulation.setField(range, "greaterThanExp", true);
-    Deencapsulation.setField(range, "dimColEvaluatorInfo", 
dimColumnResolvedFilterInfo);
+    RangeValueFilterExecutorImpl range = new RangeValueFilterExecutorImpl();
+    range.setIsDimensionPresentInCurrentBlock(true);
+    range.setLessThanExp(true);
+    range.setGreaterThanExp(true);
+    range.setDimColEvaluatorInfo(dimColumnResolvedFilterInfo);
 
     result = range.isScanRequired(BlockMin, BlockMax, filterMinMax, true);
-    startBlockMinIsDefaultStart = Deencapsulation.getField(range, "startBlockMinIsDefaultStart");
+    startBlockMinIsDefaultStart = range.isStartBlockMinIsDefaultStart();
     Assert.assertTrue(result);
     Assert.assertTrue(startBlockMinIsDefaultStart);
   }
@@ -419,15 +413,14 @@ public class RangeFilterProcessorTest {
 
     byte[][] filterMinMax = { { (byte) 15 }, { (byte) 20 } };
 
-    RangeValueFilterExecutorImpl range = new MockUp<RangeValueFilterExecutorImpl>() {
-    }.getMockInstance();
-    Deencapsulation.setField(range, "isDimensionPresentInCurrentBlock", true);
-    Deencapsulation.setField(range, "lessThanExp", true);
-    Deencapsulation.setField(range, "greaterThanExp", true);
-    Deencapsulation.setField(range, "dimColEvaluatorInfo", 
dimColumnResolvedFilterInfo);
+    RangeValueFilterExecutorImpl range = new RangeValueFilterExecutorImpl();
+    range.setIsDimensionPresentInCurrentBlock(true);
+    range.setLessThanExp(true);
+    range.setGreaterThanExp(true);
+    range.setDimColEvaluatorInfo(dimColumnResolvedFilterInfo);
 
     result = range.isScanRequired(BlockMin, BlockMax, filterMinMax, true);
-    endBlockMaxisDefaultEnd = Deencapsulation.getField(range, "endBlockMaxisDefaultEnd");
+    endBlockMaxisDefaultEnd = range.isEndBlockMaxisDefaultEnd();
     Assert.assertTrue(result);
     Assert.assertTrue(endBlockMaxisDefaultEnd);
   }
diff --git a/examples/spark/pom.xml b/examples/spark/pom.xml
index 9a891487c2..07181a93a6 100644
--- a/examples/spark/pom.xml
+++ b/examples/spark/pom.xml
@@ -106,12 +106,12 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.18</version>
+        <version>${surefire.version}</version>
         <!-- Note config is repeated in scalatest config -->
         <configuration>
          <skip>false</skip>
          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
+          <argLine>-Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
           <systemProperties>
             <java.awt.headless>true</java.awt.headless>
           </systemProperties>
@@ -171,7 +171,7 @@
           <junitxml>.</junitxml>
           <testFailureIgnore>false</testFailureIgnore>
           <filereports>CarbonTestSuite.txt</filereports>
-          <argLine>-ea -Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m
+          <argLine>-ea -Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m
           </argLine>
           <stderr />
           <environmentVariables>
diff --git a/geo/pom.xml b/geo/pom.xml
index 342d737c6b..968e7e730b 100644
--- a/geo/pom.xml
+++ b/geo/pom.xml
@@ -75,12 +75,12 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.18</version>
+        <version>${surefire.version}</version>
         <!-- Note config is repeated in scalatest config -->
         <configuration>
          <skip>false</skip>
          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
+          <argLine>-Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
           <systemProperties>
             <java.awt.headless>true</java.awt.headless>
           </systemProperties>
@@ -140,7 +140,7 @@
           <junitxml>.</junitxml>
           <testFailureIgnore>false</testFailureIgnore>
           <filereports>CarbonTestSuite.txt</filereports>
-          <argLine>-ea -Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m
+          <argLine>-ea -Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m
           </argLine>
           <stderr />
           <environmentVariables>
diff --git a/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonTableInputFormatTest.java b/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonTableInputFormatTest.java
deleted file mode 100644
index 3863740a07..0000000000
--- a/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonTableInputFormatTest.java
+++ /dev/null
@@ -1,279 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.hadoop.ft;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileFilter;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.index.IndexFilter;
-import org.apache.carbondata.core.datastore.impl.FileFactory;
-import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
-import org.apache.carbondata.core.metadata.datatype.DataTypes;
-import org.apache.carbondata.core.scan.expression.ColumnExpression;
-import org.apache.carbondata.core.scan.expression.Expression;
-import org.apache.carbondata.core.scan.expression.LiteralExpression;
-import org.apache.carbondata.core.scan.expression.conditional.EqualToExpression;
-import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.path.CarbonTablePath;
-import org.apache.carbondata.hadoop.CarbonProjection;
-import org.apache.carbondata.hadoop.api.CarbonTableInputFormat;
-import org.apache.carbondata.hadoop.testutil.StoreCreator;
-import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.junit.Assert;
-import org.junit.Test;
-
-public class CarbonTableInputFormatTest {
-  // changed setUp to static init block to avoid un wanted multiple time store creation
-  private static StoreCreator creator;
-
-  private static CarbonLoadModel loadModel;
-  static {
-    CarbonProperties.getInstance().
-        addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, "/tmp/carbon/badrecords");
-    CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_WRITTEN_BY_APPNAME, "CarbonTableInputFormatTest");
-    try {
-      creator = new StoreCreator(new File("target/store").getAbsolutePath(),
-          new File("../hadoop/src/test/resources/data.csv").getCanonicalPath());
-      loadModel = creator.createCarbonStore();
-    } catch (Exception e) {
-      e.printStackTrace();
-      Assert.fail("create table failed: " + e.getMessage());
-    }
-  }
-
-  @Test public void testGetFilteredSplits() throws Exception {
-    CarbonTableInputFormat carbonInputFormat = new CarbonTableInputFormat();
-    JobConf jobConf = new JobConf(new Configuration());
-    Job job = Job.getInstance(jobConf);
-    job.getConfiguration().set("query.id", UUID.randomUUID().toString());
-    String tblPath = creator.getAbsoluteTableIdentifier().getTablePath();
-    FileInputFormat.addInputPath(job, new Path(tblPath));
-    CarbonTableInputFormat.setDatabaseName(job.getConfiguration(), creator.getAbsoluteTableIdentifier().getDatabaseName());
-    CarbonTableInputFormat.setTableName(job.getConfiguration(), creator.getAbsoluteTableIdentifier().getTableName());
-    Expression expression = new EqualToExpression(new ColumnExpression("country", DataTypes.STRING),
-        new LiteralExpression("china", DataTypes.STRING));
-    CarbonTableInputFormat.setFilterPredicates(job.getConfiguration(),
-        new IndexFilter(loadModel.getCarbonDataLoadSchema().getCarbonTable(), expression));
-    List splits = carbonInputFormat.getSplits(job);
-
-    Assert.assertTrue(splits != null);
-    Assert.assertTrue(!splits.isEmpty());
-  }
-
-  @Test
-  public void testGetSplits() throws Exception {
-    CarbonTableInputFormat carbonInputFormat = new CarbonTableInputFormat();
-    JobConf jobConf = new JobConf(new Configuration());
-    Job job = Job.getInstance(jobConf);
-    job.getConfiguration().set("query.id", UUID.randomUUID().toString());
-    String tblPath = creator.getAbsoluteTableIdentifier().getTablePath();
-    FileInputFormat.addInputPath(job, new Path(tblPath));
-    CarbonTableInputFormat.setDatabaseName(job.getConfiguration(), creator.getAbsoluteTableIdentifier().getDatabaseName());
-    CarbonTableInputFormat.setTableName(job.getConfiguration(), creator.getAbsoluteTableIdentifier().getTableName());
-    // list files to get the carbondata file
-    String segmentPath = CarbonTablePath.getSegmentPath(creator.getAbsoluteTableIdentifier().getTablePath(), "0");
-    File segmentDir = new File(segmentPath);
-    if (segmentDir.exists() && segmentDir.isDirectory()) {
-      File[] files = segmentDir.listFiles(new FileFilter() {
-        @Override
-        public boolean accept(File pathname) {
-          return pathname.getName().endsWith("carbondata");
-        }
-      });
-      if (files != null && files.length > 0) {
-        job.getConfiguration().set(CarbonTableInputFormat.INPUT_FILES, files[0].getName());
-      }
-    }
-    List splits = carbonInputFormat.getSplits(job);
-
-    Assert.assertTrue(splits != null && splits.size() == 1);
-  }
-
-  @Test public void testInputFormatMapperReadAllRowsAndColumns() throws Exception {
-    String outPath = "target/output";
-    try {
-      CarbonProjection carbonProjection = new CarbonProjection();
-      carbonProjection.addColumn("ID");
-      carbonProjection.addColumn("date");
-      carbonProjection.addColumn("country");
-      carbonProjection.addColumn("name");
-      carbonProjection.addColumn("phonetype");
-      carbonProjection.addColumn("serialname");
-      carbonProjection.addColumn("salary");
-      runJob(outPath, carbonProjection, null);
-      Assert.assertEquals("Count lines are not matching", 1000, 
countTheLines(outPath));
-      Assert.assertEquals("Column count are not matching", 7, 
countTheColumns(outPath));
-    } catch (Exception e) {
-      e.printStackTrace();
-      Assert.assertTrue("failed", false);
-      throw e;
-    } finally {
-      creator.clearIndexes();
-      FileFactory.deleteAllFilesOfDir(new File(outPath));
-    }
-  }
-
-  @Test public void testInputFormatMapperReadAllRowsAndFewColumns() throws Exception {
-    try {
-      String outPath = "target/output2";
-      CarbonProjection carbonProjection = new CarbonProjection();
-      carbonProjection.addColumn("ID");
-      carbonProjection.addColumn("country");
-      carbonProjection.addColumn("salary");
-      runJob(outPath, carbonProjection, null);
-
-      Assert.assertEquals("Count lines are not matching", 1000, 
countTheLines(outPath));
-      Assert.assertEquals("Column count are not matching", 3, 
countTheColumns(outPath));
-    } catch (Exception e) {
-      e.printStackTrace();
-      Assert.assertTrue("failed", false);
-    } finally {
-      creator.clearIndexes();
-    }
-  }
-
-  @Test public void testInputFormatMapperReadAllRowsAndFewColumnsWithFilter() throws Exception {
-    try {
-      String outPath = "target/output3";
-      CarbonProjection carbonProjection = new CarbonProjection();
-      carbonProjection.addColumn("ID");
-      carbonProjection.addColumn("country");
-      carbonProjection.addColumn("salary");
-      Expression expression =
-          new EqualToExpression(new ColumnExpression("country", 
DataTypes.STRING),
-              new LiteralExpression("france", DataTypes.STRING));
-      runJob(outPath, carbonProjection, expression);
-      Assert.assertEquals("Count lines are not matching", 101, 
countTheLines(outPath));
-      Assert.assertEquals("Column count are not matching", 3, 
countTheColumns(outPath));
-    } catch (Exception e) {
-      Assert.assertTrue("failed", false);
-    } finally {
-      creator.clearIndexes();
-    }
-  }
-
-
-  private int countTheLines(String outPath) throws Exception {
-    File file = new File(outPath);
-    if (file.exists()) {
-      BufferedReader reader = new BufferedReader(new FileReader(file));
-      int i = 0;
-      while (reader.readLine() != null) {
-        i++;
-      }
-      reader.close();
-      return i;
-    }
-    return 0;
-  }
-
-  private int countTheColumns(String outPath) throws Exception {
-    File file = new File(outPath);
-    if (file.exists()) {
-      BufferedReader reader = new BufferedReader(new FileReader(file));
-      String[] split = reader.readLine().split(",");
-      reader.close();
-      return split.length;
-    }
-    return 0;
-  }
-
-  public static class Map extends Mapper<Void, Object[], Text, Text> {
-
-    private BufferedWriter fileWriter;
-
-    public void setup(Context context) throws IOException, InterruptedException {
-      String outPath = context.getConfiguration().get("outpath");
-      File outFile = new File(outPath);
-      try {
-        fileWriter = new BufferedWriter(new FileWriter(outFile));
-      } catch (Exception e) {
-        throw new RuntimeException(e);
-      }
-    }
-
-    public void map(Void key, Object[] value, Context context) throws IOException {
-      StringBuilder builder = new StringBuilder();
-      for (int i = 0; i < value.length; i++) {
-        builder.append(value[i]).append(",");
-      }
-      fileWriter.write(builder.toString().substring(0, builder.toString().length() - 1));
-      fileWriter.newLine();
-    }
-
-    @Override
-    public void cleanup(Context context) throws IOException, InterruptedException {
-      super.cleanup(context);
-      fileWriter.close();
-      context.write(new Text(), new Text());
-    }
-  }
-
-  private void runJob(String outPath, CarbonProjection projection, Expression filter)
-      throws Exception {
-
-    Configuration configuration = new Configuration();
-    configuration.set("mapreduce.cluster.local.dir", new File(outPath + 
"1").getCanonicalPath());
-    Job job = Job.getInstance(configuration);
-    job.setJarByClass(CarbonTableInputFormatTest.class);
-    job.setOutputKeyClass(Text.class);
-    job.setOutputValueClass(IntWritable.class);
-    job.setMapperClass(Map.class);
-    job.setInputFormatClass(CarbonTableInputFormat.class);
-    job.setOutputFormatClass(TextOutputFormat.class);
-    AbsoluteTableIdentifier abs = creator.getAbsoluteTableIdentifier();
-    if (projection != null) {
-      CarbonTableInputFormat.setColumnProjection(job.getConfiguration(), projection);
-    }
-    if (filter != null) {
-      CarbonTableInputFormat.setFilterPredicates(job.getConfiguration(),
-          new IndexFilter(loadModel.getCarbonDataLoadSchema().getCarbonTable(), filter));
-    }
-    CarbonTableInputFormat.setDatabaseName(job.getConfiguration(),
-        abs.getCarbonTableIdentifier().getDatabaseName());
-    CarbonTableInputFormat.setTableName(job.getConfiguration(),
-        abs.getCarbonTableIdentifier().getTableName());
-    FileInputFormat.addInputPath(job, new Path(abs.getTablePath()));
-    CarbonUtil.deleteFoldersAndFiles(new File(outPath + "1"));
-    FileOutputFormat.setOutputPath(job, new Path(outPath + "1"));
-    job.getConfiguration().set("outpath", outPath);
-    job.getConfiguration().set("query.id", String.valueOf(System.nanoTime()));
-    boolean status = job.waitForCompletion(true);
-  }
-}
diff --git a/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonTableOutputFormatTest.java b/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonTableOutputFormatTest.java
deleted file mode 100644
index e9f1b36053..0000000000
--- a/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonTableOutputFormatTest.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.hadoop.ft;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.io.IOException;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.path.CarbonTablePath;
-import org.apache.carbondata.hadoop.api.CarbonTableOutputFormat;
-import org.apache.carbondata.hadoop.internal.ObjectArrayWritable;
-import org.apache.carbondata.hadoop.testutil.StoreCreator;
-import org.apache.carbondata.processing.loading.csvinput.CSVInputFormat;
-import org.apache.carbondata.processing.loading.csvinput.StringArrayWritable;
-import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-public class CarbonTableOutputFormatTest {
-
-  static CarbonLoadModel carbonLoadModel;
-
-  // changed setUp to static init block to avoid un wanted multiple time store creation
-  static {
-    CarbonProperties.getInstance().
-        addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, "/tmp/carbon/badrecords");
-    CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_WRITTEN_BY_APPNAME, "CarbonTableOutputFormatTest");
-    try {
-      carbonLoadModel = new StoreCreator(new File("target/store").getAbsolutePath(),
-          new File("../hadoop/src/test/resources/data.csv").getCanonicalPath()).createTableAndLoadModel();
-    } catch (Exception e) {
-      Assert.fail("create table failed: " + e.getMessage());
-    }
-  }
-
-
-  @Test public void testOutputFormat() throws Exception {
-    runJob("");
-    String segmentPath = CarbonTablePath.getSegmentPath(carbonLoadModel.getTablePath(), "0");
-    File file = new File(segmentPath);
-    Assert.assertTrue(file.exists());
-    File[] listFiles = file.listFiles(new FilenameFilter() {
-      @Override
-      public boolean accept(File dir, String name) {
-        return name.endsWith(".carbondata") ||
-            name.endsWith(".carbonindex") ||
-            name.endsWith(".carbonindexmerge");
-      }
-    });
-
-    Assert.assertTrue(listFiles.length == 2);
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true");
-  }
-
-  @Before
-  public void setUp() throws Exception {
-    CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "false");
-  }
-
- public static class Map extends Mapper<NullWritable, StringArrayWritable, NullWritable, ObjectArrayWritable> {
-
-   private ObjectArrayWritable writable = new ObjectArrayWritable();
-   @Override
-   protected void map(NullWritable key, StringArrayWritable value, Context context)
-       throws IOException, InterruptedException {
-     writable.set(value.get());
-     context.write(key, writable);
-   }
- }
-
-  private void runJob(String outPath) throws Exception {
-    Configuration configuration = new Configuration();
-    String mrLocalDir = new File(outPath + "1").getCanonicalPath();
-    configuration.set("mapreduce.cluster.local.dir", mrLocalDir);
-    Job job = Job.getInstance(configuration);
-    job.setJarByClass(CarbonTableOutputFormatTest.class);
-    job.setOutputKeyClass(NullWritable.class);
-    job.setOutputValueClass(ObjectArrayWritable.class);
-    job.setMapperClass(Map.class);
-    job.setNumReduceTasks(0);
-
-    FileInputFormat.addInputPath(job, new Path(carbonLoadModel.getFactFilePath()));
-    CarbonTableOutputFormat.setLoadModel(job.getConfiguration(), carbonLoadModel);
-    CarbonTableOutputFormat.setCarbonTable(job.getConfiguration(), carbonLoadModel.getCarbonDataLoadSchema().getCarbonTable());
-    CSVInputFormat.setHeaderExtractionEnabled(job.getConfiguration(), true);
-    job.setInputFormatClass(CSVInputFormat.class);
-    job.setOutputFormatClass(CarbonTableOutputFormat.class);
-    CarbonUtil.deleteFoldersAndFiles(new File(carbonLoadModel.getTablePath() + "1"));
-    FileOutputFormat.setOutputPath(job, new Path(carbonLoadModel.getTablePath() + "1"));
-    job.getConfiguration().set("outpath", outPath);
-    job.getConfiguration().set("query.id", String.valueOf(System.nanoTime()));
-    job.waitForCompletion(true);
-
-    CarbonUtil.deleteFoldersAndFiles(new File(mrLocalDir));
-  }
-
-}
diff --git a/index/secondary-index/pom.xml b/index/secondary-index/pom.xml
index 15cb7a7b84..88ca4b860e 100644
--- a/index/secondary-index/pom.xml
+++ b/index/secondary-index/pom.xml
@@ -109,11 +109,11 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.18</version>
+        <version>${surefire.version}</version>
         <!-- Note config is repeated in scalatest config -->
        <configuration>
          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
+          <argLine>-Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
           <systemProperties>
             <java.awt.headless>true</java.awt.headless>
           </systemProperties>
@@ -129,7 +129,7 @@
          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
           <junitxml>.</junitxml>
           <filereports>CarbonTestSuite.txt</filereports>
-          <argLine> ${argLine} -ea -Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m
+          <argLine> ${argLine} -ea -Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m
           </argLine>
           <stderr />
           <environmentVariables>
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala
index cfad7cd049..8eb8a5f287 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala
@@ -258,7 +258,6 @@ class CarbonDataFileMergeTestCaseOnSI
     val ex = intercept[RuntimeException] {
       sql("REFRESH INDEX nonindexmerge_index1 ON TABLE 
nonindexmerge").collect()
     }
-    mock1.tearDown()
     assert(ex.getMessage.contains("An exception occurred while merging data 
files in SI"))
     var df1 = sql("""Select * from nonindexmerge where name='n16000'""")
       .queryExecution.sparkPlan
@@ -270,7 +269,6 @@ class CarbonDataFileMergeTestCaseOnSI
     val exception = intercept[AnalysisException] {
       sql("REFRESH INDEX nonindexmerge_index1 ON TABLE 
nonindexmerge").collect()
     }
-    mock2.tearDown()
     assert(exception.getMessage.contains("Table is already locked for 
compaction. " +
       "Please try after some time."))
     df1 = sql("""Select * from nonindexmerge where name='n16000'""")
@@ -289,7 +287,6 @@ class CarbonDataFileMergeTestCaseOnSI
     val exception2 = intercept[Exception] {
       sql("REFRESH INDEX nonindexmerge_index1 ON TABLE 
nonindexmerge").collect()
     }
-    mock3.tearDown()
     assert(exception2.getMessage.contains("Merge data files Failure in Merger 
Rdd."))
     df1 = sql("""Select * from nonindexmerge where name='n16000'""")
         .queryExecution.sparkPlan
@@ -314,7 +311,6 @@ class CarbonDataFileMergeTestCaseOnSI
       }
     }
     sql("REFRESH INDEX nonindexmerge_index1 ON TABLE nonindexmerge").collect()
-    mock.tearDown()
     val df1 = sql("""Select * from nonindexmerge where name='n16000'""")
         .queryExecution.sparkPlan
     assert(isFilterPushedDownToSI(df1))
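
The mock.tearDown() deletions in this file and in the test suites that follow all stem from the JMockit upgrade: recent JMockit releases removed the public MockUp.tearDown() method, and a mock-up is now discarded automatically when the test that applied it finishes. A minimal sketch of the resulting pattern, assuming ScalaTest's FunSuite and a hypothetical Clock class (neither is part of this patch):

    import mockit.{Mock, MockUp}
    import org.scalatest.FunSuite

    class Clock { def now(): Long = System.currentTimeMillis() }

    class ClockSuite extends FunSuite {
      test("a MockUp lasts only for the test that applies it") {
        new MockUp[Clock] {
          @Mock def now(): Long = 42L // fixed value while this test runs
        }
        assert(new Clock().now() == 42L)
        // no tearDown() call: JMockit restores the real Clock when the test ends
      }
    }
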
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala
index 7bb4397de9..41d8b92804 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala
@@ -115,7 +115,6 @@ class DropTableTest extends QueryTest with BeforeAndAfterAll {
         }
       }
       sql("drop table if exists testDrop")
-      mock.tearDown()
       assert(Files.exists(Paths.get(indexTablePath)))
       sql("drop table if exists testDrop")
     } finally {
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonInternalMetastore.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonInternalMetastore.scala
index 8ccf767f36..7b6a48008a 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonInternalMetastore.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonInternalMetastore.scala
@@ -92,7 +92,6 @@ class TestCarbonInternalMetastore extends QueryTest with BeforeAndAfterAll with
       }
     }
     checkExistence(sql("show indexes on table1"), true, "index1")
-    mock.tearDown()
   }
 
   test("test refresh index with different value of isIndexTableExists") {
@@ -130,7 +129,6 @@ class TestCarbonInternalMetastore extends QueryTest with BeforeAndAfterAll with
     parentCarbonTable = CarbonEnv.getCarbonTable(Some("test"), "table1")(sqlContext.sparkSession)
     assert(CarbonIndexUtil.isIndexExists(parentCarbonTable).equalsIgnoreCase("true"))
     assert(CarbonIndexUtil.isIndexTableExists(parentCarbonTable).equalsIgnoreCase("false"))
-    mock.tearDown()
   }
 
   test("test refresh index with indexExists as null") {
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexTable.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexTable.scala
index e74ab0e8e3..07fa8f6c2c 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexTable.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexTable.scala
@@ -559,7 +559,6 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
     }
     assert(ex.getMessage.contains("Not able to acquire lock. Another Data Modification operation " +
       "is already in progress for either default.maintable or default or indextable."))
-    mock.tearDown()
     sql("drop table if exists maintable")
   }
 
@@ -581,14 +580,12 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
     val ex = intercept[RuntimeException] {
       sql("create index indextable1 on table maintable(b, c) AS 'carbondata'")
     }
-    mock.tearDown()
     assert(ex.getMessage.contains("Index with [indextable1] under database [default] is present " +
         "in stale state. Please use drop index if exists command to delete the index table"))
     val mock2 = TestSecondaryIndexUtils.mockIsFileExists()
     val exception = intercept[RuntimeException] {
       sql("create index indextable1 on table maintable(b, c) AS 'carbondata'")
     }
-    mock2.tearDown()
     assert(exception.getMessage.contains("Index with [indextable1] under database [default] " +
         "is present in stale state. Please use drop index " +
         "if exists command to delete the index table"))
@@ -613,7 +610,6 @@ class TestCreateIndexTable extends QueryTest with BeforeAndAfterAll {
     val ex = intercept[IOException] {
       sql("create index indextable on table maintable(b) AS 'carbondata'")
     }
-    mock.tearDown()
     assert(ex.getMessage.contains("An exception occurred while creating index table."))
   }
 
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala
index 9e57aab82e..188727ee3e 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala
@@ -304,7 +304,6 @@ class TestCreateIndexWithLoadAndCompaction extends QueryTest with BeforeAndAfter
     // after clean files
     val mock = mockreadSegmentList()
     sql("CLEAN FILES FOR TABLE table1 options('force'='true')")
-    mock.tearDown()
     val table = CarbonEnv
       .getCarbonTable(Some("default"), "idx1")(sqlContext.sparkSession)
     val details = SegmentStatusManager.readLoadMetadata(table.getMetadataPath,
@@ -373,7 +372,6 @@ class TestCreateIndexWithLoadAndCompaction extends QueryTest with BeforeAndAfter
       sql(s"insert into table1 values(1,'a1','b1')")
     }
     assert(ex.getMessage.contains("An exception occurred while loading data to SI table"))
-    mock.tearDown()
     sql("drop table if exists table1")
   }
 
@@ -404,7 +402,6 @@ class TestCreateIndexWithLoadAndCompaction extends QueryTest with BeforeAndAfter
       sql("ALTER TABLE table1 COMPACT 'CUSTOM' WHERE SEGMENT.ID IN (1,2)")
     }
     assert(ex.getMessage.contains("An exception occurred while triggering pre priming."))
-    mock.tearDown()
     checkExistence(sql("show indexes on table table1"), true,
       "idx1", "idx2", "enabled")
   }
diff --git a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondaryIndex.scala b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondaryIndex.scala
index 821f7485c3..71592d3aad 100644
--- a/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondaryIndex.scala
+++ b/index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondaryIndex.scala
@@ -711,7 +711,6 @@ class TestSIWithSecondaryIndex extends QueryTest with BeforeAndAfterAll {
     val ex = intercept[Exception] {
       sql("create index m_indextable on table maintable2(b,c) AS 'carbondata'")
     }
-    mock.tearDown()
     assert(ex.getMessage.contains("Problem loading data while creating secondary index:"))
   }
 
@@ -744,7 +743,6 @@ class TestSIWithSecondaryIndex extends QueryTest with BeforeAndAfterAll {
       }
     }
     sql("create index m_indextable on table maintable2(b,c) AS 'carbondata'")
-    mock.tearDown()
     checkExistence(sql("show indexes on table maintable2"),
       true, "m_indextable", "enabled")
     assert(sql("show segments on m_indextable").collect().isEmpty)
diff --git a/integration/flink/pom.xml b/integration/flink/pom.xml
index ba82c3302c..64e4540345 100644
--- a/integration/flink/pom.xml
+++ b/integration/flink/pom.xml
@@ -329,12 +329,12 @@
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-surefire-plugin</artifactId>
-                <version>2.18</version>
+                <version>${surefire.version}</version>
                 <!-- Note config is repeated in scalatest config -->
                 <configuration>
                     <skip>false</skip>
                     <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-                    <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
+                    <argLine>-Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
                     <systemProperties>
                         <java.awt.headless>true</java.awt.headless>
                     </systemProperties>
@@ -352,7 +352,7 @@
                     <junitxml>.</junitxml>
                     <testFailureIgnore>false</testFailureIgnore>
                     <filereports>CarbonTestSuite.txt</filereports>
-                    <argLine>-ea -Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m
+                    <argLine>-ea -Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m
                     </argLine>
                     <stderr />
                     <environmentVariables>
diff --git a/integration/hive/pom.xml b/integration/hive/pom.xml
index f2984b2a66..f7f66b5a89 100644
--- a/integration/hive/pom.xml
+++ b/integration/hive/pom.xml
@@ -185,11 +185,11 @@
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-surefire-plugin</artifactId>
-                <version>2.18</version>
+                <version>${surefire.version}</version>
                 <!-- Note config is repeated in scalatest config -->
                 <configuration>
                     <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-                    <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
+                    <argLine>-Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
                     <systemProperties>
                         <java.awt.headless>true</java.awt.headless>
                     </systemProperties>
diff --git a/integration/presto/pom.xml b/integration/presto/pom.xml
index b24c8b8f2f..72612a17f7 100644
--- a/integration/presto/pom.xml
+++ b/integration/presto/pom.xml
@@ -604,12 +604,12 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.18</version>
+        <version>${surefire.version}</version>
         <!-- Note config is repeated in scalatest config -->
         <configuration>
           <skip>false</skip>
           <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
+          <argLine>-Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
           <systemProperties>
             <java.awt.headless>true</java.awt.headless>
           </systemProperties>
@@ -669,7 +669,7 @@
           <junitxml>.</junitxml>
           <testFailureIgnore>false</testFailureIgnore>
           <filereports>CarbonTestSuite.txt</filereports>
-          <argLine>-ea -Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m
+          <argLine>-ea -Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m
           </argLine>
           <stderr />
           <environmentVariables>
diff --git a/integration/spark-common-cluster-test/pom.xml b/integration/spark-common-cluster-test/pom.xml
index c520ef73dd..2f8a34c021 100644
--- a/integration/spark-common-cluster-test/pom.xml
+++ b/integration/spark-common-cluster-test/pom.xml
@@ -148,7 +148,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.18</version>
+        <version>${surefire.version}</version>
         <!-- Note config is repeated in scalatest config -->
         <configuration>
           <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
diff --git a/integration/spark/pom.xml b/integration/spark/pom.xml
index 1c6eb38567..0442e512f5 100644
--- a/integration/spark/pom.xml
+++ b/integration/spark/pom.xml
@@ -410,7 +410,7 @@
                 <artifactItem>
                   <groupId>org.jacoco</groupId>
                   <artifactId>org.jacoco.ant</artifactId>
-                  <version>0.7.9</version>
+                  <version>0.8.12</version>
                 </artifactItem>
               </artifactItems>
               <stripVersion>true</stripVersion>
@@ -422,11 +422,11 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.18</version>
+        <version>${surefire.version}</version>
         <!-- Note config is repeated in scalatest config -->
         <configuration>
           <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
+          <argLine>-Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
           <systemProperties>
             <java.awt.headless>true</java.awt.headless>
             <spark.carbon.hive.schema.store>${carbon.hive.based.metastore}</spark.carbon.hive.schema.store>
@@ -443,7 +443,7 @@
           <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
           <junitxml>.</junitxml>
           <filereports>CarbonTestSuite.txt</filereports>
-          <argLine> ${argLine} -ea -Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m
+          <argLine> ${argLine} -ea -Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m
           </argLine>
           <stderr />
           <environmentVariables>
@@ -564,7 +564,7 @@
           <dependency>
             <groupId>org.jacoco</groupId>
             <artifactId>org.jacoco.ant</artifactId>
-            <version>0.7.9</version>
+            <version>0.8.12</version>
           </dependency>
         </dependencies>
       </plugin>
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala
index 5777daa2ab..ec2eb52da3 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala
@@ -367,7 +367,7 @@ class TestQueryWithColumnMetCacheAndCacheLevelProperty
     sql("CREATE INDEX parallel_index on parallel_index_load(b) AS 'carbondata'")
     checkAnswer(sql("select b from parallel_index"), Seq(Row("bb"), Row("dd"), Row("ff")))
     sql("drop index if exists parallel_index on parallel_index_load")
-    val mock: MockUp[TableInfo] = new MockUp[TableInfo] {
+    new MockUp[TableInfo] {
       @Mock
       def isSchemaModified(): Boolean = {
         true
@@ -376,6 +376,5 @@ class TestQueryWithColumnMetCacheAndCacheLevelProperty
     sql("CREATE INDEX parallel_index on parallel_index_load(b) AS 'carbondata'")
     checkAnswer(sql("select b from parallel_index"), Seq(Row("bb"), Row("dd"), Row("ff")))
     sql("drop index if exists parallel_index on parallel_index_load")
-    mock.tearDown()
   }
 }
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestRenameTableWithIndex.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestRenameTableWithIndex.scala
index b522d3bdc7..d4e2cb0c4a 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestRenameTableWithIndex.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestRenameTableWithIndex.scala
@@ -161,7 +161,6 @@ class TestRenameTableWithIndex extends QueryTest with BeforeAndAfterAll {
     assert(!plan.contains("idx_x1_mac1"))
     checkAnswer(sql("select count(*) from x1 where mac = '2'"), Row(1))
     sql("DROP TABLE IF EXISTS x1")
-    mock.tearDown();
   }
 
   /*
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CarbonIndexFileMergeTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CarbonIndexFileMergeTestCase.scala
index fead2274f6..8251db1dae 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CarbonIndexFileMergeTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CarbonIndexFileMergeTestCase.scala
@@ -559,7 +559,6 @@ class CarbonIndexFileMergeTestCase
     }
     checkAnswer(sql("Select count(*) from indexmerge"), Seq(Row(0)))
     sql("DROP TABLE indexmerge")
-    mockMethod.tearDown()
   }
 
   test("verify load when merge index fails for partition table") {
@@ -587,7 +586,6 @@ class CarbonIndexFileMergeTestCase
     }
     checkAnswer(sql("Select count(*) from indexmergePartition"), Seq(Row(0)))
     sql("DROP TABLE indexmergePartition")
-    mockMethod.tearDown()
   }
 
   private def mergeFileNameIsNull(segmentId: String, dbName: String, tableName: String): Boolean = {
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
index e8cb732521..74cfb4a133 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
@@ -1250,8 +1250,6 @@ class UpdateCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
     } catch {
       case ex: Exception =>
     }
-    mock.tearDown()
-
   }
 
   override def afterAll {
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/sql/commands/TestCarbonShowCacheCommand.scala b/integration/spark/src/test/scala/org/apache/carbondata/sql/commands/TestCarbonShowCacheCommand.scala
index a282fe0144..b0f0ea7f78 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/sql/commands/TestCarbonShowCacheCommand.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/sql/commands/TestCarbonShowCacheCommand.scala
@@ -460,7 +460,6 @@ class TestCarbonShowCacheCommand extends QueryTest with BeforeAndAfterAll {
     sql("create table maintable1(a string, b int, c string) stored as carbondata")
     sql("insert into maintable1 select 'k',1,'k'")
     checkAnswer(sql("select * from maintable1"), Seq(Row("k", 1, "k")))
-    mock.tearDown()
   }
 
 }
diff --git a/integration/spark/src/test/scala/org/apache/indexserver/DistributedRDDUtilsTest.scala b/integration/spark/src/test/scala/org/apache/indexserver/DistributedRDDUtilsTest.scala
index bc7ba13a23..2a81a8a81f 100644
--- a/integration/spark/src/test/scala/org/apache/indexserver/DistributedRDDUtilsTest.scala
+++ b/integration/spark/src/test/scala/org/apache/indexserver/DistributedRDDUtilsTest.scala
@@ -21,16 +21,13 @@ import java.util.concurrent.{ConcurrentHashMap, Executors}
 
 import scala.collection.JavaConverters._
 
-import mockit.{Mock, MockUp}
-import org.apache.hadoop.conf.Configuration
-import org.apache.hadoop.fs.permission.{FsAction, FsPermission}
 import org.scalatest.{BeforeAndAfterEach, FunSuite}
 
 import org.apache.carbondata.core.datastore.impl.FileFactory
-import org.apache.carbondata.core.index.{IndexInputFormat, Segment}
+import org.apache.carbondata.core.index.Segment
 import org.apache.carbondata.core.index.dev.expr.IndexInputSplitWrapper
 import org.apache.carbondata.core.indexstore.blockletindex.BlockletIndexInputSplit
-import org.apache.carbondata.indexserver.{DistributedIndexJob, DistributedRDDUtils, IndexRDDPartition}
+import org.apache.carbondata.indexserver.{DistributedRDDUtils, IndexRDDPartition}
 
 class DistributedRDDUtilsTest extends FunSuite with BeforeAndAfterEach {
 
@@ -173,60 +170,6 @@ class DistributedRDDUtilsTest extends FunSuite with BeforeAndAfterEach {
     }
   }
 
-  test("Test file create and delete when query") {
-    val distributedRDDUtilsTest = new DistributedIndexJob()
-
-    val mockDataMapFormat = new MockUp[IndexInputFormat]() {
-      @Mock
-      def getQueryId: String = {
-        "a885a111-439f-4b91-ad81-f0bd48164b84"
-      }
-    }
-    try {
-      distributedRDDUtilsTest.execute(mockDataMapFormat.getMockInstance, new Configuration())
-    } catch {
-      case ex: Exception =>
-    }
-    val tmpPath = "file:////tmp/indexservertmp/a885a111-439f-4b91-ad81-f0bd48164b84"
-    assert(!FileFactory.isFileExist(tmpPath))
-    assert(FileFactory.isFileExist(indexServerTempFolder))
-  }
-
-  test("Test file create and delete when query the getQueryId path is exists") {
-    val distributedRDDUtilsTest = new DistributedIndexJob()
-    val tmpPath = "file:////tmp/indexservertmp/a885a111-439f-4b91-ad81-f0bd48164b84"
-    val newPath = "file:////tmp/indexservertmp/a885a111-439f-4b91-ad81-f0bd48164b84/ip1"
-    val newFile = "file:////tmp/indexservertmp/a885a111-439f-4b91-ad81-f0bd48164b84/ip1/as1"
-    val tmpPathAnother = "file:////tmp/indexservertmp/a885a111-439f-4b91-ad81-f0bd48164b8412"
-    FileFactory.createDirectoryAndSetPermission(tmpPath,
-      new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
-    FileFactory.createDirectoryAndSetPermission(newPath,
-      new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
-    FileFactory.createNewFile(newFile, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
-    FileFactory.createDirectoryAndSetPermission(tmpPathAnother,
-      new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL))
-
-    assert(FileFactory.isFileExist(newFile))
-    assert(FileFactory.isFileExist(tmpPath))
-    assert(FileFactory.isFileExist(newPath))
-    assert(FileFactory.isFileExist(tmpPathAnother))
-
-    val mockDataMapFormat = new MockUp[IndexInputFormat]() {
-      @Mock
-      def getQueryId: String = {
-        "a885a111-439f-4b91-ad81-f0bd48164b84"
-      }
-    }
-    try {
-      distributedRDDUtilsTest.execute(mockDataMapFormat.getMockInstance, new Configuration())
-    } catch {
-      case ex: Exception =>
-    }
-    assert(!FileFactory.isFileExist(tmpPath))
-    assert(FileFactory.isFileExist(indexServerTempFolder))
-    assert(FileFactory.isFileExist(tmpPathAnother))
-
   test("test concurrent assigning of executors") {
     executorCache.clear()
     tableCache.clear()
diff --git a/mv/plan/pom.xml b/mv/plan/pom.xml
index 42353658d4..06b5778c90 100644
--- a/mv/plan/pom.xml
+++ b/mv/plan/pom.xml
@@ -57,12 +57,12 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.18</version>
+        <version>${surefire.version}</version>
         <!-- Note config is repeated in scalatest config -->
         <configuration>
           <skip>false</skip>
           <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
+          <argLine>-Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
           <systemProperties>
             <java.awt.headless>true</java.awt.headless>
           </systemProperties>
@@ -122,7 +122,7 @@
           <junitxml>.</junitxml>
           <testFailureIgnore>false</testFailureIgnore>
           <filereports>CarbonTestSuite.txt</filereports>
-          <argLine>-ea -Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m
+          <argLine>-ea -Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m
           </argLine>
           <stderr />
           <environmentVariables>
diff --git a/pom.xml b/pom.xml
index ee18016e15..fa5fafb938 100644
--- a/pom.xml
+++ b/pom.xml
@@ -128,8 +128,8 @@
     <hadoop.version>2.7.2</hadoop.version>
     <httpclient.version>4.3.4</httpclient.version>
     <httpcore.version>4.3-alpha1</httpcore.version>
-    <scala.binary.version>2.11</scala.binary.version>
-    <scala.version>2.11.12</scala.version>
+    <scala.binary.version>2.12</scala.binary.version>
+    <scala.version>2.12.10</scala.version>
     <hadoop.deps.scope>compile</hadoop.deps.scope>
     <spark.version>2.4.5</spark.version>
     <spark.binary.version>2.4</spark.binary.version>
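
With the default scala.binary.version moving from 2.11 to 2.12 above, the build now compiles under the 2.12 lambda encoding. One user-visible difference worth noting (an illustrative sketch, not taken from this patch): Scala 2.12 converts function literals directly to Java single-abstract-method interfaces, where 2.11 required an explicit anonymous class:

    object SamDemo extends App {
      // Compiles on Scala 2.12 because Runnable is a SAM type; on 2.11 this
      // line needed `new Runnable { def run(): Unit = ... }` instead.
      val task: Runnable = () => println("running under the 2.12 SAM conversion")
      new Thread(task).start()
    }
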
@@ -156,6 +156,8 @@
     <suite.name>org.apache.carbondata.cluster.sdv.suite.SDVSuites</suite.name>
     <script.extension>.sh</script.extension>
     <carbon.hive.based.metastore>false</carbon.hive.based.metastore>
+    <jmockit.version>1.49</jmockit.version>
+    <surefire.version>3.5.3</surefire.version>
   </properties>
 
   <repositories>
@@ -361,7 +363,7 @@
       <dependency>
         <groupId>org.jmockit</groupId>
         <artifactId>jmockit</artifactId>
-        <version>1.10</version>
+        <version>${jmockit.version}</version>
         <exclusions>
           <exclusion>
             <groupId>*</groupId>
@@ -465,98 +467,37 @@
           <outputEncoding>${project.reporting.outputEncoding}</outputEncoding>
         </configuration>
       </plugin>
-      <!--
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>findbugs-maven-plugin</artifactId>
-        <version>3.0.4</version>
-        <configuration>
-          <excludeFilterFile>${dev.path}/findbugs-exclude.xml</excludeFilterFile>
-          <failOnError>true</failOnError>
-          <findbugsXmlOutput>true</findbugsXmlOutput>
-          <xmlOutput>true</xmlOutput>
-          <effort>Max</effort>
-          <maxHeap>1024</maxHeap>
-        </configuration>
-        <executions>
-          <execution>
-            <id>analyze-compile</id>
-            <phase>compile</phase>
-            <goals>
-              <goal>check</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>  -->
       <plugin>
         <groupId>org.jacoco</groupId>
         <artifactId>jacoco-maven-plugin</artifactId>
-        <version>0.7.9</version>
-        <executions>
-          <execution>
-            <id>default-prepare-agent</id>
-            <goals>
-              <goal>prepare-agent</goal>
-            </goals>
-          </execution>
-          <execution>
-            <id>default-prepare-agent-integration</id>
-            <goals>
-              <goal>prepare-agent-integration</goal>
-            </goals>
-          </execution>
-          <execution>
-            <id>default-report</id>
-            <goals>
-              <goal>report</goal>
-            </goals>
-          </execution>
-          <execution>
-            <id>default-report-integration</id>
-            <goals>
-              <goal>report-integration</goal>
-            </goals>
-          </execution>
-          <execution>
-            <id>default-check</id>
-            <goals>
-              <goal>check</goal>
-            </goals>
-            <configuration>
-              <excludes>
-                <exclude>**/*SparkUnknownExpression*.class</exclude>
-                <exclude>**/org/apache/carbondata/cluster/sdv/generated/*</exclude>
-                <exclude>**/org.apache.carbondata.cluster.sdv.generated.*</exclude>
-                <exclude>**/org.apache.spark.sql.test.*</exclude>
-                <exclude>**/org.apache.carbondata.format.*</exclude>
-                <exclude>**/org.apache.carbondata.core.unsafe*</exclude>
-              </excludes>
-              <includes>
-                <include>**/org.apache.*</include>
-              </includes>
-              <rules>
-                <!-- implementation is needed only for Maven 2 -->
-                <rule implementation="org.jacoco.maven.RuleConfiguration">
-                  <element>BUNDLE</element>
-                  <limits>
-                    <!-- implementation is needed only for Maven 2 -->
-                    <limit implementation="org.jacoco.report.check.Limit">
-                      <counter>COMPLEXITY</counter>
-                      <value>COVEREDRATIO</value>
-                      <minimum>0.10</minimum>
-                    </limit>
-                  </limits>
-                </rule>
-              </rules>
-            </configuration>
-          </execution>
-        </executions>
+        <version>0.8.12</version>
       </plugin>
       <plugin>
         <groupId>org.antlr</groupId>
         <artifactId>antlr4-maven-plugin</artifactId>
         <version>${antlr4.version}</version>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>${surefire.version}</version> <!-- use a newer Surefire version -->
+        <configuration>
+          <argLine>
+            -javaagent:"${settings.localRepository}/org/jmockit/jmockit/${jmockit.version}/jmockit-${jmockit.version}.jar"
+            --add-opens java.base/sun.nio.ch=ALL-UNNAMED
+            --add-opens java.base/jdk.internal.ref=ALL-UNNAMED
+            --add-opens java.base/java.nio=ALL-UNNAMED
+          </argLine>
+        </configuration>
+        <dependencies>
+          <!-- ensure the Surefire plugin can find the JMockit JAR -->
+          <dependency>
+            <groupId>org.jmockit</groupId>
+            <artifactId>jmockit</artifactId>
+            <version>${jmockit.version}</version>
+          </dependency>
+        </dependencies>
+      </plugin>
     </plugins>
   </build>
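
Two details of the surefire block added above: JMockit stopped self-attaching its agent in release 1.42, so with 1.49 the -javaagent argument is required for @Mock to take effect; and the --add-opens flags relax JDK 9+ module encapsulation for code (Spark internals, JMockit itself) that reflects into java.base. An illustrative Scala sketch, not from this patch, of the kind of access the flags permit:

    // Reflective access into java.nio, as Spark's off-heap memory code performs.
    // Without --add-opens java.base/java.nio=ALL-UNNAMED this throws
    // InaccessibleObjectException on JDK 16+ (and only warns on JDK 9-15).
    val cls = Class.forName("java.nio.DirectByteBuffer")
    val ctor = cls.getDeclaredConstructors.head
    ctor.setAccessible(true)
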
 
@@ -656,8 +597,6 @@
       <properties>
         <spark.binary.version>2.4</spark.binary.version>
         <spark.version>2.4.5</spark.version>
-        <scala.binary.version>2.11</scala.binary.version>
-        <scala.version>2.11.12</scala.version>
       </properties>
       <build>
         <plugins>
@@ -852,7 +791,6 @@
       <id>prestodb</id>
       <properties>
         <presto.version>0.291</presto.version>
-        <scala.version>2.11.8</scala.version>
         <airlift.version>0.31</airlift.version>
         <presto.groupid>com.facebook.presto</presto.groupid>
         <presto.hadoop.groupid>com.facebook.presto.hadoop</presto.hadoop.groupid>
@@ -892,7 +830,6 @@
       </activation>
       <properties>
         <presto.version>333</presto.version>
-        <scala.version>2.11.8</scala.version>
         <airlift.version>0.38</airlift.version>
         <presto.groupid>io.prestosql</presto.groupid>
         <presto.hadoop.groupid>io.prestosql.hadoop</presto.hadoop.groupid>
diff --git a/processing/src/test/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGeneratorTest.java b/processing/src/test/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGeneratorTest.java
index 2ea262798c..fb68b55870 100644
--- a/processing/src/test/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGeneratorTest.java
+++ b/processing/src/test/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGeneratorTest.java
@@ -24,7 +24,6 @@ import java.util.TimeZone;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.util.CarbonProperties;
 
-import mockit.Deencapsulation;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
@@ -74,31 +73,6 @@ public class TimeStampDirectDictionaryGeneratorTest {
     Assert.assertEquals(memberString, actualValue);
   }
 
-  /**
-   * The memberString should be retrieved from the actual surrogate key
-   *
-   * @throws Exception
-   */
-  @Test public void getSurrogateWithCutoff() throws Exception {
-    SimpleDateFormat timeParser = new SimpleDateFormat(CarbonProperties.getInstance()
-        .getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
-            CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
-    timeParser.setLenient(false);
-    TimeStampDirectDictionaryGenerator generator = new TimeStampDirectDictionaryGenerator();
-    long cutOffValue = timeParser.parse("1500-01-01 00:00:00").getTime();
-    //setting cutoff time to 1500-01-01 00:00:00 , so we can load data from this time
-    Deencapsulation.setField(generator, "cutOffTimeStamp", cutOffValue);
-    int surrogateFromValue = generator.generateDirectSurrogateKey("1500-01-01 00:00:01");
-    long valueFromSurrogate = (long) generator.getValueFromSurrogate(surrogateFromValue);
-    Date date = new Date(valueFromSurrogate / 1000);
-    Assert.assertEquals("1500-01-01 00:00:01", timeParser.format(date));
-    surrogateFromValue = generator.generateDirectSurrogateKey("1499-12-12 00:00:00");
-    //1499-12-12 00:00:00 is a value before cut off, so it is a bad record and surrogate should be 1
-    Assert.assertEquals(1, surrogateFromValue);
-    //re setting the value to default
-    Deencapsulation.setField(generator, "cutOffTimeStamp", 0L);
-  }
-
   /**
    * The memberString should be retrieved from the actual surrogate key
    *
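
The getSurrogateWithCutoff test above was deleted together with its mockit.Deencapsulation import: that helper class no longer exists in JMockit 1.49. Were the test to be restored, plain reflection could set the same private field; a hedged sketch reusing the names from the deleted code:

    // Hypothetical stand-in for Deencapsulation.setField(generator, "cutOffTimeStamp", cutOffValue)
    val field = classOf[TimeStampDirectDictionaryGenerator].getDeclaredField("cutOffTimeStamp")
    field.setAccessible(true)
    field.setLong(generator, cutOffValue)
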
diff --git a/processing/src/test/java/org/apache/carbondata/lcm/locks/LocalFileLockTest.java b/processing/src/test/java/org/apache/carbondata/lcm/locks/LocalFileLockTest.java
index e24810b01b..5f9f7baa43 100644
--- a/processing/src/test/java/org/apache/carbondata/lcm/locks/LocalFileLockTest.java
+++ b/processing/src/test/java/org/apache/carbondata/lcm/locks/LocalFileLockTest.java
@@ -93,7 +93,6 @@ public class LocalFileLockTest {
       ICarbonLock carbonLock = CarbonLockFactory.getCarbonLockObj(absoluteTableIdentifier, LockUsage.TABLE_STATUS_LOCK);
       carbonLock.lockWithRetries();
       assert (new File(rootPath + "/target/1/LockFiles/tablestatus.lock").exists());
-      assert (!new File(absoluteTableIdentifier.getTablePath() + "/LockFiles").exists());
     } finally {
       tearDown();
     }
diff --git a/streaming/pom.xml b/streaming/pom.xml
index e815d86ec9..72437fbba6 100644
--- a/streaming/pom.xml
+++ b/streaming/pom.xml
@@ -112,11 +112,11 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.18</version>
+        <version>${surefire.version}</version>
         <!-- Note config is repeated in scala test config -->
         <configuration>
           <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
+          <argLine>-Xmx3g -XX:MaxMetaspaceSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
           <systemProperties>
             <java.awt.headless>true</java.awt.headless>
           </systemProperties>

