This is an automated email from the ASF dual-hosted git repository.

vinoyang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 078d482  [HUDI-624]: Split some of the code from PR for HUDI-479 (#1344)
078d482 is described below

commit 078d4825d909b2c469398f31c97d2290687321a8
Author: Suneel Marthi <smar...@apache.org>
AuthorDate: Fri Feb 21 01:22:21 2020 -0500

    [HUDI-624]: Split some of the code from PR for HUDI-479 (#1344)
---
 .../hudi/cli/commands/HoodieLogFileCommand.java    | 11 +++----
 .../org/apache/hudi/cli/commands/SparkMain.java    | 10 +++---
 .../java/org/apache/hudi/cli/utils/SparkUtil.java  |  5 ++-
 .../org/apache/hudi/config/HoodieWriteConfig.java  |  4 +--
 .../org/apache/hudi/func/LazyIterableIterator.java |  2 +-
 .../hudi/index/bloom/BloomIndexFileInfo.java       |  9 +++---
 .../org/apache/hudi/io/HoodieAppendHandle.java     |  4 +--
 .../org/apache/hudi/io/HoodieCommitArchiveLog.java |  8 ++---
 .../strategy/BoundedIOCompactionStrategy.java      |  5 ++-
 .../io/compact/strategy/CompactionStrategy.java    |  5 ++-
 .../apache/hudi/metrics/JmxMetricsReporter.java    |  4 +--
 .../org/apache/hudi/table/RollbackExecutor.java    |  6 ++--
 .../org/apache/hudi/TestCompactionAdminClient.java |  1 -
 .../apache/hudi/config/TestHoodieWriteConfig.java  |  4 +--
 .../hudi/index/bloom/TestHoodieBloomIndex.java     | 22 ++++++-------
 .../index/bloom/TestHoodieGlobalBloomIndex.java    | 10 +++---
 .../org/apache/hudi/common/model/HoodieKey.java    |  7 ++---
 .../org/apache/hudi/common/model/HoodieRecord.java |  8 ++---
 .../hudi/common/model/HoodieRecordLocation.java    |  7 ++---
 .../hudi/common/util/BufferedRandomAccessFile.java |  6 +---
 .../java/org/apache/hudi/common/util/FSUtils.java  |  3 +-
 .../hudi/common/util/ObjectSizeCalculator.java     | 36 +++++++++++-----------
 .../log/TestHoodieLogFormatAppendFailure.java      |  4 +--
 .../table/string/TestHoodieActiveTimeline.java     |  1 -
 .../table/view/TestHoodieTableFileSystemView.java  |  5 ++-
 .../hudi/common/util/TestCompactionUtils.java      | 21 ++++++-------
 .../org/apache/hudi/hive/SchemaDifference.java     | 28 +++++++++--------
 .../java/org/apache/hudi/hive/util/SchemaUtil.java |  8 ++---
 .../org/apache/hudi/hive/TestHiveSyncTool.java     |  3 +-
 .../test/java/org/apache/hudi/hive/TestUtil.java   | 15 ++++-----
 .../org/apache/hudi/utilities/UtilHelpers.java     |  9 +++---
 31 files changed, 130 insertions(+), 141 deletions(-)

diff --git 
a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HoodieLogFileCommand.java 
b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HoodieLogFileCommand.java
index 8a50309..2bb87e0 100644
--- 
a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HoodieLogFileCommand.java
+++ 
b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HoodieLogFileCommand.java
@@ -38,8 +38,6 @@ import org.apache.hudi.config.HoodieCompactionConfig;
 import org.apache.hudi.config.HoodieMemoryConfig;
 import org.apache.hudi.hive.util.SchemaUtil;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Maps;
 import com.fasterxml.jackson.databind.ObjectMapper;
 
 import org.apache.avro.Schema;
@@ -59,6 +57,7 @@ import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.stream.Collectors;
 
 import scala.Tuple2;
@@ -85,14 +84,14 @@ public class HoodieLogFileCommand implements CommandMarker {
     List<String> logFilePaths = Arrays.stream(fs.globStatus(new 
Path(logFilePathPattern)))
         .map(status -> 
status.getPath().toString()).collect(Collectors.toList());
     Map<String, List<Tuple3<HoodieLogBlockType, Tuple2<Map<HeaderMetadataType, 
String>, Map<HeaderMetadataType, String>>, Integer>>> commitCountAndMetadata =
-        Maps.newHashMap();
+        new HashMap<>();
     int numCorruptBlocks = 0;
     int dummyInstantTimeCount = 0;
 
     for (String logFilePath : logFilePaths) {
       FileStatus[] fsStatus = fs.listStatus(new Path(logFilePath));
       Schema writerSchema = new AvroSchemaConverter()
-          
.convert(Preconditions.checkNotNull(SchemaUtil.readSchemaFromLogFile(fs, new 
Path(logFilePath))));
+          .convert(Objects.requireNonNull(SchemaUtil.readSchemaFromLogFile(fs, 
new Path(logFilePath))));
       Reader reader = HoodieLogFormat.newReader(fs, new 
HoodieLogFile(fsStatus[0].getPath()), writerSchema);
 
       // read the avro blocks
@@ -181,7 +180,7 @@ public class HoodieLogFileCommand implements CommandMarker {
     AvroSchemaConverter converter = new AvroSchemaConverter();
     // get schema from last log file
     Schema readerSchema =
-        
converter.convert(Preconditions.checkNotNull(SchemaUtil.readSchemaFromLogFile(fs,
 new Path(logFilePaths.get(logFilePaths.size() - 1)))));
+        
converter.convert(Objects.requireNonNull(SchemaUtil.readSchemaFromLogFile(fs, 
new Path(logFilePaths.get(logFilePaths.size() - 1)))));
 
     List<IndexedRecord> allRecords = new ArrayList<>();
 
@@ -204,7 +203,7 @@ public class HoodieLogFileCommand implements CommandMarker {
     } else {
       for (String logFile : logFilePaths) {
         Schema writerSchema = new AvroSchemaConverter()
-            
.convert(Preconditions.checkNotNull(SchemaUtil.readSchemaFromLogFile(client.getFs(),
 new Path(logFile))));
+            
.convert(Objects.requireNonNull(SchemaUtil.readSchemaFromLogFile(client.getFs(),
 new Path(logFile))));
         HoodieLogFormat.Reader reader =
             HoodieLogFormat.newReader(fs, new HoodieLogFile(new 
Path(logFile)), writerSchema);
         // read the avro blocks
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java 
b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
index 13d1c8b..b0771c2 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
@@ -18,11 +18,11 @@
 
 package org.apache.hudi.cli.commands;
 
-import com.google.common.base.Strings;
 import org.apache.hudi.HoodieWriteClient;
 import org.apache.hudi.cli.DedupeSparkJob;
 import org.apache.hudi.cli.utils.SparkUtil;
 import org.apache.hudi.common.util.FSUtils;
+import org.apache.hudi.common.util.StringUtils;
 import org.apache.hudi.config.HoodieIndexConfig;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.index.HoodieIndex;
@@ -81,7 +81,7 @@ public class SparkMain {
       case UPSERT:
         assert (args.length >= 12);
         String propsFilePath = null;
-        if (!Strings.isNullOrEmpty(args[11])) {
+        if (!StringUtils.isNullOrEmpty(args[11])) {
           propsFilePath = args[11];
         }
         List<String> configs = new ArrayList<>();
@@ -94,7 +94,7 @@ public class SparkMain {
       case COMPACT_RUN:
         assert (args.length >= 9);
         propsFilePath = null;
-        if (!Strings.isNullOrEmpty(args[8])) {
+        if (!StringUtils.isNullOrEmpty(args[8])) {
           propsFilePath = args[8];
         }
         configs = new ArrayList<>();
@@ -107,7 +107,7 @@ public class SparkMain {
       case COMPACT_SCHEDULE:
         assert (args.length >= 6);
         propsFilePath = null;
-        if (!Strings.isNullOrEmpty(args[5])) {
+        if (!StringUtils.isNullOrEmpty(args[5])) {
           propsFilePath = args[5];
         }
         configs = new ArrayList<>();
@@ -142,7 +142,7 @@ public class SparkMain {
       case CLEAN:
         assert (args.length >= 5);
         propsFilePath = null;
-        if (!Strings.isNullOrEmpty(args[3])) {
+        if (!StringUtils.isNullOrEmpty(args[3])) {
           propsFilePath = args[3];
         }
         configs = new ArrayList<>();
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java 
b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
index b71a979..e7ae6f4 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
@@ -24,8 +24,6 @@ import org.apache.hudi.cli.commands.SparkMain;
 import org.apache.hudi.common.util.FSUtils;
 import org.apache.hudi.common.util.StringUtils;
 
-import com.google.common.base.Preconditions;
-
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.launcher.SparkLauncher;
@@ -33,6 +31,7 @@ import org.apache.spark.launcher.SparkLauncher;
 import java.io.File;
 import java.net.URISyntaxException;
 import java.util.Map;
+import java.util.Objects;
 
 /**
  * Utility functions dealing with Spark.
@@ -55,7 +54,7 @@ public class SparkUtil {
       sparkLauncher.setPropertiesFile(propertiesFile);
     }
     File libDirectory = new File(new File(currentJar).getParent(), "lib");
-    for (String library : Preconditions.checkNotNull(libDirectory.list())) {
+    for (String library : Objects.requireNonNull(libDirectory.list())) {
       sparkLauncher.addJar(new File(libDirectory, library).getAbsolutePath());
     }
     return sparkLauncher;
diff --git 
a/hudi-client/src/main/java/org/apache/hudi/config/HoodieWriteConfig.java 
b/hudi-client/src/main/java/org/apache/hudi/config/HoodieWriteConfig.java
index 642384b..e4e89df 100644
--- a/hudi-client/src/main/java/org/apache/hudi/config/HoodieWriteConfig.java
+++ b/hudi-client/src/main/java/org/apache/hudi/config/HoodieWriteConfig.java
@@ -29,7 +29,6 @@ import org.apache.hudi.index.HoodieIndex;
 import org.apache.hudi.io.compact.strategy.CompactionStrategy;
 import org.apache.hudi.metrics.MetricsReporterType;
 
-import com.google.common.base.Preconditions;
 import org.apache.parquet.hadoop.metadata.CompressionCodecName;
 import org.apache.spark.storage.StorageLevel;
 
@@ -40,6 +39,7 @@ import java.io.FileReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Properties;
 
 /**
@@ -764,7 +764,7 @@ public class HoodieWriteConfig extends DefaultHoodieConfig {
 
       // Build WriteConfig at the end
       HoodieWriteConfig config = new HoodieWriteConfig(props);
-      Preconditions.checkArgument(config.getBasePath() != null);
+      Objects.requireNonNull(config.getBasePath());
       return config;
     }
   }
diff --git 
a/hudi-client/src/main/java/org/apache/hudi/func/LazyIterableIterator.java 
b/hudi-client/src/main/java/org/apache/hudi/func/LazyIterableIterator.java
index fe5bfbc..2b02224 100644
--- a/hudi-client/src/main/java/org/apache/hudi/func/LazyIterableIterator.java
+++ b/hudi-client/src/main/java/org/apache/hudi/func/LazyIterableIterator.java
@@ -33,7 +33,7 @@ import java.util.Iterator;
  */
 public abstract class LazyIterableIterator<I, O> implements Iterable<O>, 
Iterator<O> {
 
-  protected Iterator<I> inputItr = null;
+  protected Iterator<I> inputItr;
   private boolean consumed = false;
   private boolean startCalled = false;
   private boolean endCalled = false;
diff --git 
a/hudi-client/src/main/java/org/apache/hudi/index/bloom/BloomIndexFileInfo.java 
b/hudi-client/src/main/java/org/apache/hudi/index/bloom/BloomIndexFileInfo.java
index c041814..ebd20dc 100644
--- 
a/hudi-client/src/main/java/org/apache/hudi/index/bloom/BloomIndexFileInfo.java
+++ 
b/hudi-client/src/main/java/org/apache/hudi/index/bloom/BloomIndexFileInfo.java
@@ -18,9 +18,8 @@
 
 package org.apache.hudi.index.bloom;
 
-import com.google.common.base.Objects;
-
 import java.io.Serializable;
+import java.util.Objects;
 
 /**
  * Metadata about a given file group, useful for index lookup.
@@ -80,14 +79,14 @@ public class BloomIndexFileInfo implements Serializable {
     }
 
     BloomIndexFileInfo that = (BloomIndexFileInfo) o;
-    return Objects.equal(that.fileId, fileId) && 
Objects.equal(that.minRecordKey, minRecordKey)
-        && Objects.equal(that.maxRecordKey, maxRecordKey);
+    return Objects.equals(that.fileId, fileId) && 
Objects.equals(that.minRecordKey, minRecordKey)
+        && Objects.equals(that.maxRecordKey, maxRecordKey);
 
   }
 
   @Override
   public int hashCode() {
-    return Objects.hashCode(fileId, minRecordKey, maxRecordKey);
+    return Objects.hash(fileId, minRecordKey, maxRecordKey);
   }
 
   @Override
diff --git 
a/hudi-client/src/main/java/org/apache/hudi/io/HoodieAppendHandle.java 
b/hudi-client/src/main/java/org/apache/hudi/io/HoodieAppendHandle.java
index d37e11f..674fd9a 100644
--- a/hudi-client/src/main/java/org/apache/hudi/io/HoodieAppendHandle.java
+++ b/hudi-client/src/main/java/org/apache/hudi/io/HoodieAppendHandle.java
@@ -44,7 +44,6 @@ import org.apache.hudi.exception.HoodieAppendException;
 import org.apache.hudi.exception.HoodieUpsertException;
 import org.apache.hudi.table.HoodieTable;
 
-import com.google.common.collect.Maps;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.generic.IndexedRecord;
 import org.apache.hadoop.fs.Path;
@@ -56,6 +55,7 @@ import org.apache.spark.util.SizeEstimator;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Iterator;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicLong;
@@ -97,7 +97,7 @@ public class HoodieAppendHandle<T extends 
HoodieRecordPayload> extends HoodieWri
   // Max block size to limit to for a log block
   private int maxBlockSize = config.getLogFileDataBlockMaxSize();
   // Header metadata for a log block
-  private Map<HeaderMetadataType, String> header = Maps.newHashMap();
+  private Map<HeaderMetadataType, String> header = new HashMap<>();
   // Total number of new records inserted into the delta file
   private long insertRecordsWritten = 0;
 
diff --git 
a/hudi-client/src/main/java/org/apache/hudi/io/HoodieCommitArchiveLog.java 
b/hudi-client/src/main/java/org/apache/hudi/io/HoodieCommitArchiveLog.java
index 6847a24..2d2d1e3 100644
--- a/hudi-client/src/main/java/org/apache/hudi/io/HoodieCommitArchiveLog.java
+++ b/hudi-client/src/main/java/org/apache/hudi/io/HoodieCommitArchiveLog.java
@@ -49,8 +49,6 @@ import org.apache.hudi.table.HoodieTable;
 
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import org.apache.avro.Schema;
 import org.apache.avro.generic.IndexedRecord;
 import org.apache.hadoop.fs.Path;
@@ -61,7 +59,9 @@ import org.apache.spark.api.java.JavaSparkContext;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
@@ -145,7 +145,7 @@ public class HoodieCommitArchiveLog {
     // TODO: Handle ROLLBACK_ACTION in future
     // ROLLBACK_ACTION is currently not defined in HoodieActiveTimeline
     HoodieTimeline cleanAndRollbackTimeline = table.getActiveTimeline()
-        
.getTimelineOfActions(Sets.newHashSet(HoodieTimeline.CLEAN_ACTION)).filterCompletedInstants();
+        
.getTimelineOfActions(Collections.singleton(HoodieTimeline.CLEAN_ACTION)).filterCompletedInstants();
     Stream<HoodieInstant> instants = cleanAndRollbackTimeline.getInstants()
         
.collect(Collectors.groupingBy(HoodieInstant::getAction)).values().stream().map(hoodieInstants
 -> {
           if (hoodieInstants.size() > maxCommitsToKeep) {
@@ -270,7 +270,7 @@ public class HoodieCommitArchiveLog {
 
   private void writeToFile(Schema wrapperSchema, List<IndexedRecord> records) 
throws Exception {
     if (records.size() > 0) {
-      Map<HeaderMetadataType, String> header = Maps.newHashMap();
+      Map<HeaderMetadataType, String> header = new HashMap<>();
       header.put(HoodieLogBlock.HeaderMetadataType.SCHEMA, 
wrapperSchema.toString());
       HoodieAvroDataBlock block = new HoodieAvroDataBlock(records, header);
       this.writer = writer.appendBlock(block);
diff --git 
a/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/BoundedIOCompactionStrategy.java
 
b/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/BoundedIOCompactionStrategy.java
index c84df1b..8976d22 100644
--- 
a/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/BoundedIOCompactionStrategy.java
+++ 
b/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/BoundedIOCompactionStrategy.java
@@ -22,8 +22,7 @@ import org.apache.hudi.avro.model.HoodieCompactionOperation;
 import org.apache.hudi.avro.model.HoodieCompactionPlan;
 import org.apache.hudi.config.HoodieWriteConfig;
 
-import com.google.common.collect.Lists;
-
+import java.util.ArrayList;
 import java.util.List;
 
 /**
@@ -40,7 +39,7 @@ public class BoundedIOCompactionStrategy extends 
CompactionStrategy {
     // Iterate through the operations in order and accept operations as long 
as we are within the
     // IO limit
     // Preserves the original ordering of compactions
-    List<HoodieCompactionOperation> finalOperations = Lists.newArrayList();
+    List<HoodieCompactionOperation> finalOperations = new ArrayList<>();
     long targetIORemaining = writeConfig.getTargetIOPerCompactionInMB();
     for (HoodieCompactionOperation op : operations) {
       long opIo = op.getMetrics().get(TOTAL_IO_MB).longValue();
diff --git 
a/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/CompactionStrategy.java
 
b/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/CompactionStrategy.java
index dd17212..9e36b4f 100644
--- 
a/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/CompactionStrategy.java
+++ 
b/hudi-client/src/main/java/org/apache/hudi/io/compact/strategy/CompactionStrategy.java
@@ -28,9 +28,8 @@ import org.apache.hudi.common.util.Option;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.io.compact.HoodieMergeOnReadTableCompactor;
 
-import com.google.common.collect.Maps;
-
 import java.io.Serializable;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -61,7 +60,7 @@ public abstract class CompactionStrategy implements 
Serializable {
    */
   public Map<String, Double> captureMetrics(HoodieWriteConfig writeConfig, 
Option<HoodieBaseFile> dataFile,
       String partitionPath, List<HoodieLogFile> logFiles) {
-    Map<String, Double> metrics = Maps.newHashMap();
+    Map<String, Double> metrics = new HashMap<>();
     long defaultMaxParquetFileSize = writeConfig.getParquetMaxFileSize();
     // Total size of all the log files
     Long totalLogFileSize = 
logFiles.stream().map(HoodieLogFile::getFileSize).filter(size -> size >= 0)
diff --git 
a/hudi-client/src/main/java/org/apache/hudi/metrics/JmxMetricsReporter.java 
b/hudi-client/src/main/java/org/apache/hudi/metrics/JmxMetricsReporter.java
index 2559a4b..921dcea 100644
--- a/hudi-client/src/main/java/org/apache/hudi/metrics/JmxMetricsReporter.java
+++ b/hudi-client/src/main/java/org/apache/hudi/metrics/JmxMetricsReporter.java
@@ -21,7 +21,6 @@ package org.apache.hudi.metrics;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.exception.HoodieException;
 
-import com.google.common.base.Preconditions;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 
@@ -32,6 +31,7 @@ import javax.management.remote.JMXServiceURL;
 import java.io.Closeable;
 import java.lang.management.ManagementFactory;
 import java.rmi.registry.LocateRegistry;
+import java.util.Objects;
 
 /**
  * Implementation of Jmx reporter, which used to report jmx metric.
@@ -67,7 +67,7 @@ public class JmxMetricsReporter extends MetricsReporter {
   @Override
   public void start() {
     try {
-      Preconditions.checkNotNull(connector, "Cannot start as the jmxReporter 
is null.");
+      Objects.requireNonNull(connector, "Cannot start as the jmxReporter is 
null.");
       connector.start();
     } catch (Exception e) {
       throw new HoodieException(e);
diff --git 
a/hudi-client/src/main/java/org/apache/hudi/table/RollbackExecutor.java 
b/hudi-client/src/main/java/org/apache/hudi/table/RollbackExecutor.java
index 0f3297c..fac08b0 100644
--- a/hudi-client/src/main/java/org/apache/hudi/table/RollbackExecutor.java
+++ b/hudi-client/src/main/java/org/apache/hudi/table/RollbackExecutor.java
@@ -32,7 +32,6 @@ import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.exception.HoodieRollbackException;
 
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Maps;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.PathFilter;
@@ -47,6 +46,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 
 import scala.Tuple2;
 
@@ -129,7 +129,7 @@ public class RollbackExecutor implements Serializable {
           // getFileStatus would reflect correct stats and 
FileNotFoundException is not thrown in
           // cloud-storage : HUDI-168
           Map<FileStatus, Long> filesToNumBlocksRollback = new HashMap<>();
-          
filesToNumBlocksRollback.put(metaClient.getFs().getFileStatus(Preconditions.checkNotNull(writer).getLogFile().getPath()),
 1L);
+          
filesToNumBlocksRollback.put(metaClient.getFs().getFileStatus(Objects.requireNonNull(writer).getLogFile().getPath()),
 1L);
           return new Tuple2<>(rollbackRequest.getPartitionPath(),
                   
HoodieRollbackStat.newBuilder().withPartitionPath(rollbackRequest.getPartitionPath())
                           
.withRollbackBlockAppendResults(filesToNumBlocksRollback).build());
@@ -215,7 +215,7 @@ public class RollbackExecutor implements Serializable {
 
   private Map<HeaderMetadataType, String> generateHeader(String commit) {
     // generate metadata
-    Map<HeaderMetadataType, String> header = Maps.newHashMap();
+    Map<HeaderMetadataType, String> header = new HashMap<>();
     header.put(HeaderMetadataType.INSTANT_TIME, 
metaClient.getActiveTimeline().lastInstant().get().getTimestamp());
     header.put(HeaderMetadataType.TARGET_INSTANT_TIME, commit);
     header.put(HeaderMetadataType.COMMAND_BLOCK_TYPE,
diff --git 
a/hudi-client/src/test/java/org/apache/hudi/TestCompactionAdminClient.java 
b/hudi-client/src/test/java/org/apache/hudi/TestCompactionAdminClient.java
index 0ed435c..ef14235 100644
--- a/hudi-client/src/test/java/org/apache/hudi/TestCompactionAdminClient.java
+++ b/hudi-client/src/test/java/org/apache/hudi/TestCompactionAdminClient.java
@@ -32,7 +32,6 @@ import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.collection.Pair;
 import org.apache.hudi.exception.HoodieException;
 import org.apache.hudi.exception.HoodieIOException;
-
 import org.apache.hudi.func.OperationResult;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
diff --git 
a/hudi-client/src/test/java/org/apache/hudi/config/TestHoodieWriteConfig.java 
b/hudi-client/src/test/java/org/apache/hudi/config/TestHoodieWriteConfig.java
index e2d2a6c..80b59a0 100644
--- 
a/hudi-client/src/test/java/org/apache/hudi/config/TestHoodieWriteConfig.java
+++ 
b/hudi-client/src/test/java/org/apache/hudi/config/TestHoodieWriteConfig.java
@@ -20,13 +20,13 @@ package org.apache.hudi.config;
 
 import org.apache.hudi.config.HoodieWriteConfig.Builder;
 
-import com.google.common.collect.Maps;
 import org.junit.Test;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
 
@@ -37,7 +37,7 @@ public class TestHoodieWriteConfig {
   @Test
   public void testPropertyLoading() throws IOException {
     Builder builder = HoodieWriteConfig.newBuilder().withPath("/tmp");
-    Map<String, String> params = Maps.newHashMap();
+    Map<String, String> params = new HashMap<>();
     params.put(HoodieCompactionConfig.CLEANER_COMMITS_RETAINED_PROP, "1");
     params.put(HoodieCompactionConfig.MAX_COMMITS_TO_KEEP_PROP, "5");
     params.put(HoodieCompactionConfig.MIN_COMMITS_TO_KEEP_PROP, "2");
diff --git 
a/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java
 
b/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java
index 8bbd527..3e3ea02 100644
--- 
a/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java
+++ 
b/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java
@@ -37,7 +37,6 @@ import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.io.HoodieKeyLookupHandle;
 import org.apache.hudi.table.HoodieTable;
 
-import com.google.common.collect.Lists;
 import org.apache.avro.Schema;
 import org.apache.hadoop.fs.Path;
 import org.apache.spark.api.java.JavaPairRDD;
@@ -50,6 +49,7 @@ import org.junit.runners.Parameterized;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -149,11 +149,11 @@ public class TestHoodieBloomIndex extends 
HoodieClientTestHarness {
     HoodieRecord record4 =
         new HoodieRecord(new HoodieKey(rowChange4.getRowKey(), 
rowChange4.getPartitionPath()), rowChange4);
 
-    HoodieClientTestUtils.writeParquetFile(basePath, "2016/04/01", 
"2_0_20160401010101.parquet", Lists.newArrayList(),
+    HoodieClientTestUtils.writeParquetFile(basePath, "2016/04/01", 
"2_0_20160401010101.parquet", new ArrayList<>(),
         schema, null, false);
-    HoodieClientTestUtils.writeParquetFile(basePath, "2015/03/12", 
"1_0_20150312101010.parquet", Lists.newArrayList(),
+    HoodieClientTestUtils.writeParquetFile(basePath, "2015/03/12", 
"1_0_20150312101010.parquet", new ArrayList<>(),
         schema, null, false);
-    HoodieClientTestUtils.writeParquetFile(basePath, "2015/03/12", 
"3_0_20150312101010.parquet", Arrays.asList(record1),
+    HoodieClientTestUtils.writeParquetFile(basePath, "2015/03/12", 
"3_0_20150312101010.parquet", Collections.singletonList(record1),
         schema, null, false);
     HoodieClientTestUtils.writeParquetFile(basePath, "2015/03/12", 
"4_0_20150312101010.parquet",
         Arrays.asList(record2, record3, record4), schema, null, false);
@@ -337,11 +337,11 @@ public class TestHoodieBloomIndex extends 
HoodieClientTestHarness {
 
     // We create three parquet file, each having one record. (two different 
partitions)
     String filename1 =
-        HoodieClientTestUtils.writeParquetFile(basePath, "2016/01/31", 
Arrays.asList(record1), schema, null, true);
+        HoodieClientTestUtils.writeParquetFile(basePath, "2016/01/31", 
Collections.singletonList(record1), schema, null, true);
     String filename2 =
-        HoodieClientTestUtils.writeParquetFile(basePath, "2016/01/31", 
Arrays.asList(record2), schema, null, true);
+        HoodieClientTestUtils.writeParquetFile(basePath, "2016/01/31", 
Collections.singletonList(record2), schema, null, true);
     String filename3 =
-        HoodieClientTestUtils.writeParquetFile(basePath, "2015/01/31", 
Arrays.asList(record4), schema, null, true);
+        HoodieClientTestUtils.writeParquetFile(basePath, "2015/01/31", 
Collections.singletonList(record4), schema, null, true);
 
     // We do the tag again
     metaClient = HoodieTableMetaClient.reload(metaClient);
@@ -353,14 +353,14 @@ public class TestHoodieBloomIndex extends 
HoodieClientTestHarness {
     for (HoodieRecord record : taggedRecordRDD.collect()) {
       if (record.getRecordKey().equals(rowKey1)) {
         if (record.getPartitionPath().equals("2015/01/31")) {
-          
assertTrue(record.getCurrentLocation().getFileId().equals(FSUtils.getFileId(filename3)));
+          assertEquals(record.getCurrentLocation().getFileId(), 
FSUtils.getFileId(filename3));
         } else {
-          
assertTrue(record.getCurrentLocation().getFileId().equals(FSUtils.getFileId(filename1)));
+          assertEquals(record.getCurrentLocation().getFileId(), 
FSUtils.getFileId(filename1));
         }
       } else if (record.getRecordKey().equals(rowKey2)) {
-        
assertTrue(record.getCurrentLocation().getFileId().equals(FSUtils.getFileId(filename2)));
+        assertEquals(record.getCurrentLocation().getFileId(), 
FSUtils.getFileId(filename2));
       } else if (record.getRecordKey().equals(rowKey3)) {
-        assertTrue(!record.isCurrentLocationKnown());
+        assertFalse(record.isCurrentLocationKnown());
       }
     }
   }
diff --git 
a/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieGlobalBloomIndex.java
 
b/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieGlobalBloomIndex.java
index 15e77f4..6d4e67f 100644
--- 
a/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieGlobalBloomIndex.java
+++ 
b/hudi-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieGlobalBloomIndex.java
@@ -33,7 +33,6 @@ import org.apache.hudi.config.HoodieIndexConfig;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.table.HoodieTable;
 
-import com.google.common.collect.Lists;
 import org.apache.avro.Schema;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
@@ -43,6 +42,7 @@ import org.junit.Test;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
@@ -117,11 +117,11 @@ public class TestHoodieGlobalBloomIndex extends 
HoodieClientTestHarness {
     HoodieRecord record4 =
         new HoodieRecord(new HoodieKey(rowChange4.getRowKey(), 
rowChange4.getPartitionPath()), rowChange4);
 
-    HoodieClientTestUtils.writeParquetFile(basePath, "2016/04/01", 
"2_0_20160401010101.parquet", Lists.newArrayList(),
+    HoodieClientTestUtils.writeParquetFile(basePath, "2016/04/01", 
"2_0_20160401010101.parquet", new ArrayList<>(),
         schema, null, false);
-    HoodieClientTestUtils.writeParquetFile(basePath, "2015/03/12", 
"1_0_20150312101010.parquet", Lists.newArrayList(),
+    HoodieClientTestUtils.writeParquetFile(basePath, "2015/03/12", 
"1_0_20150312101010.parquet", new ArrayList<>(),
         schema, null, false);
-    HoodieClientTestUtils.writeParquetFile(basePath, "2015/03/12", 
"3_0_20150312101010.parquet", Arrays.asList(record1),
+    HoodieClientTestUtils.writeParquetFile(basePath, "2015/03/12", 
"3_0_20150312101010.parquet", Collections.singletonList(record1),
         schema, null, false);
     HoodieClientTestUtils.writeParquetFile(basePath, "2015/03/12", 
"4_0_20150312101010.parquet",
         Arrays.asList(record2, record3, record4), schema, null, false);
@@ -252,7 +252,7 @@ public class TestHoodieGlobalBloomIndex extends 
HoodieClientTestHarness {
         HoodieClientTestUtils.writeParquetFile(basePath, "2016/04/01", 
Collections.singletonList(record1),
                 schema, null, false);
     String filename1 =
-        HoodieClientTestUtils.writeParquetFile(basePath, "2015/03/12", 
Lists.newArrayList(),
+        HoodieClientTestUtils.writeParquetFile(basePath, "2015/03/12", new 
ArrayList<>(),
                 schema, null, false);
     String filename2 =
         HoodieClientTestUtils.writeParquetFile(basePath, "2015/03/12", 
Collections.singletonList(record2),
diff --git 
a/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieKey.java 
b/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieKey.java
index 22a05fa..a9401c9 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieKey.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieKey.java
@@ -18,9 +18,8 @@
 
 package org.apache.hudi.common.model;
 
-import com.google.common.base.Objects;
-
 import java.io.Serializable;
+import java.util.Objects;
 
 /**
  * HoodieKey consists of
@@ -58,12 +57,12 @@ public class HoodieKey implements Serializable {
       return false;
     }
     HoodieKey otherKey = (HoodieKey) o;
-    return Objects.equal(recordKey, otherKey.recordKey) && 
Objects.equal(partitionPath, otherKey.partitionPath);
+    return Objects.equals(recordKey, otherKey.recordKey) && 
Objects.equals(partitionPath, otherKey.partitionPath);
   }
 
   @Override
   public int hashCode() {
-    return Objects.hashCode(recordKey, partitionPath);
+    return Objects.hash(recordKey, partitionPath);
   }
 
   @Override
diff --git 
a/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecord.java 
b/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecord.java
index 3f1e95a..843cc70 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecord.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecord.java
@@ -20,11 +20,11 @@ package org.apache.hudi.common.model;
 
 import org.apache.hudi.common.util.Option;
 
-import com.google.common.base.Objects;
 import com.google.common.collect.ImmutableList;
 
 import java.io.Serializable;
 import java.util.List;
+import java.util.Objects;
 
 /**
  * A Single Record managed by Hoodie.
@@ -141,13 +141,13 @@ public class HoodieRecord<T extends HoodieRecordPayload> 
implements Serializable
       return false;
     }
     HoodieRecord that = (HoodieRecord) o;
-    return Objects.equal(key, that.key) && Objects.equal(data, that.data)
-        && Objects.equal(currentLocation, that.currentLocation) && 
Objects.equal(newLocation, that.newLocation);
+    return Objects.equals(key, that.key) && Objects.equals(data, that.data)
+        && Objects.equals(currentLocation, that.currentLocation) && 
Objects.equals(newLocation, that.newLocation);
   }
 
   @Override
   public int hashCode() {
-    return Objects.hashCode(key, data, currentLocation, newLocation);
+    return Objects.hash(key, data, currentLocation, newLocation);
   }
 
   @Override
diff --git 
a/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecordLocation.java
 
b/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecordLocation.java
index 2c522d1..690db88 100644
--- 
a/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecordLocation.java
+++ 
b/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecordLocation.java
@@ -18,9 +18,8 @@
 
 package org.apache.hudi.common.model;
 
-import com.google.common.base.Objects;
-
 import java.io.Serializable;
+import java.util.Objects;
 
 /**
  * Location of a HoodieRecord within the partition it belongs to. Ultimately, 
this points to an actual file on disk
@@ -44,12 +43,12 @@ public class HoodieRecordLocation implements Serializable {
       return false;
     }
     HoodieRecordLocation otherLoc = (HoodieRecordLocation) o;
-    return Objects.equal(instantTime, otherLoc.instantTime) && 
Objects.equal(fileId, otherLoc.fileId);
+    return Objects.equals(instantTime, otherLoc.instantTime) && 
Objects.equals(fileId, otherLoc.fileId);
   }
 
   @Override
   public int hashCode() {
-    return Objects.hashCode(instantTime, fileId);
+    return Objects.hash(instantTime, fileId);
   }
 
   @Override
diff --git 
a/hudi-common/src/main/java/org/apache/hudi/common/util/BufferedRandomAccessFile.java
 
b/hudi-common/src/main/java/org/apache/hudi/common/util/BufferedRandomAccessFile.java
index 15cf4c3..f40a46e 100644
--- 
a/hudi-common/src/main/java/org/apache/hudi/common/util/BufferedRandomAccessFile.java
+++ 
b/hudi-common/src/main/java/org/apache/hudi/common/util/BufferedRandomAccessFile.java
@@ -268,11 +268,7 @@ public final class BufferedRandomAccessFile extends 
RandomAccessFile {
     this.seek(this.currentPosition);
 
     // if currentPosition is at start, EOF has been reached
-    if (this.currentPosition == this.validLastPosition) {
-      return false;
-    }
-
-    return true;
+    return this.currentPosition != this.validLastPosition;
   }
 
   /**
diff --git a/hudi-common/src/main/java/org/apache/hudi/common/util/FSUtils.java 
b/hudi-common/src/main/java/org/apache/hudi/common/util/FSUtils.java
index 3cc7bf2..9ffc38c 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/FSUtils.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/FSUtils.java
@@ -47,6 +47,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Objects;
 import java.util.Map.Entry;
 import java.util.UUID;
 import java.util.function.Function;
@@ -248,7 +249,7 @@ public class FSUtils {
   }
 
   public static String getFileExtension(String fullName) {
-    Preconditions.checkNotNull(fullName);
+    Objects.requireNonNull(fullName);
     String fileName = (new File(fullName)).getName();
     int dotIndex = fileName.indexOf('.');
     return dotIndex == -1 ? "" : fileName.substring(dotIndex);
diff --git 
a/hudi-common/src/main/java/org/apache/hudi/common/util/ObjectSizeCalculator.java
 
b/hudi-common/src/main/java/org/apache/hudi/common/util/ObjectSizeCalculator.java
index 1898a4e..7609316 100644
--- 
a/hudi-common/src/main/java/org/apache/hudi/common/util/ObjectSizeCalculator.java
+++ 
b/hudi-common/src/main/java/org/apache/hudi/common/util/ObjectSizeCalculator.java
@@ -16,12 +16,6 @@
 
 package org.apache.hudi.common.util;
 
-import com.google.common.base.Preconditions;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
-import com.google.common.collect.Sets;
-
 import java.lang.management.ManagementFactory;
 import java.lang.management.MemoryPoolMXBean;
 import java.lang.reflect.Array;
@@ -29,9 +23,13 @@ import java.lang.reflect.Field;
 import java.lang.reflect.Modifier;
 import java.util.ArrayDeque;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.Deque;
+import java.util.IdentityHashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
 
 /**
@@ -121,16 +119,9 @@ public class ObjectSizeCalculator {
   // added.
   private final int superclassFieldPadding;
 
-  private final LoadingCache<Class<?>, ClassSizeInfo> classSizeInfos =
-      CacheBuilder.newBuilder().build(new CacheLoader<Class<?>, 
ClassSizeInfo>() {
-        @Override
-        public ClassSizeInfo load(Class<?> clazz) {
-          return new ClassSizeInfo(clazz);
-        }
-      });
-
+  private final Map<Class<?>, ClassSizeInfo> classSizeInfos = new 
IdentityHashMap<>();
 
-  private final Set<Object> alreadyVisited = Sets.newIdentityHashSet();
+  private final Set<Object> alreadyVisited = Collections.newSetFromMap(new 
IdentityHashMap<>());
   private final Deque<Object> pending = new ArrayDeque<>(16 * 1024);
   private long size;
 
@@ -140,7 +131,7 @@ public class ObjectSizeCalculator {
    * @param memoryLayoutSpecification a description of the JVM memory layout.
    */
   public ObjectSizeCalculator(MemoryLayoutSpecification 
memoryLayoutSpecification) {
-    Preconditions.checkNotNull(memoryLayoutSpecification);
+    Objects.requireNonNull(memoryLayoutSpecification);
     arrayHeaderSize = memoryLayoutSpecification.getArrayHeaderSize();
     objectHeaderSize = memoryLayoutSpecification.getObjectHeaderSize();
     objectPadding = memoryLayoutSpecification.getObjectPadding();
@@ -175,6 +166,15 @@ public class ObjectSizeCalculator {
     }
   }
 
+  private ClassSizeInfo getClassSizeInfo(final Class<?> clazz) {
+    ClassSizeInfo csi = classSizeInfos.get(clazz);
+    if (csi == null) {
+      csi = new ClassSizeInfo(clazz);
+      classSizeInfos.put(clazz, csi);
+    }
+    return csi;
+  }
+
   private void visit(Object obj) {
     if (alreadyVisited.contains(obj)) {
       return;
@@ -187,7 +187,7 @@ public class ObjectSizeCalculator {
       if (clazz.isArray()) {
         visitArray(obj);
       } else {
-        classSizeInfos.getUnchecked(clazz).visit(obj, this);
+        getClassSizeInfo(clazz).visit(obj, this);
       }
     }
   }
@@ -282,7 +282,7 @@ public class ObjectSizeCalculator {
       }
       final Class<?> superClass = clazz.getSuperclass();
       if (superClass != null) {
-        final ClassSizeInfo superClassInfo = 
classSizeInfos.getUnchecked(superClass);
+        final ClassSizeInfo superClassInfo = getClassSizeInfo(superClass);
         fieldsSize += roundTo(superClassInfo.fieldsSize, 
superclassFieldPadding);
         referenceFields.addAll(Arrays.asList(superClassInfo.referenceFields));
       }
diff --git 
a/hudi-common/src/test/java/org/apache/hudi/common/table/log/TestHoodieLogFormatAppendFailure.java
 
b/hudi-common/src/test/java/org/apache/hudi/common/table/log/TestHoodieLogFormatAppendFailure.java
index f09db9e..4acdf07 100644
--- 
a/hudi-common/src/test/java/org/apache/hudi/common/table/log/TestHoodieLogFormatAppendFailure.java
+++ 
b/hudi-common/src/test/java/org/apache/hudi/common/table/log/TestHoodieLogFormatAppendFailure.java
@@ -25,7 +25,6 @@ import 
org.apache.hudi.common.table.log.block.HoodieAvroDataBlock;
 import org.apache.hudi.common.table.log.block.HoodieLogBlock;
 import org.apache.hudi.common.util.SchemaTestUtil;
 
-import com.google.common.collect.Maps;
 import org.apache.avro.generic.IndexedRecord;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -45,6 +44,7 @@ import org.junit.Test;
 import java.io.File;
 import java.io.IOException;
 import java.net.URISyntaxException;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
@@ -97,7 +97,7 @@ public class TestHoodieLogFormatAppendFailure {
 
     // Some data & append.
     List<IndexedRecord> records = SchemaTestUtil.generateTestRecords(0, 10);
-    Map<HoodieLogBlock.HeaderMetadataType, String> header = Maps.newHashMap();
+    Map<HoodieLogBlock.HeaderMetadataType, String> header = new HashMap<>(2);
     header.put(HoodieLogBlock.HeaderMetadataType.INSTANT_TIME, "100");
     header.put(HoodieLogBlock.HeaderMetadataType.SCHEMA, 
getSimpleSchema().toString());
     HoodieAvroDataBlock dataBlock = new HoodieAvroDataBlock(records, header);
diff --git 
a/hudi-common/src/test/java/org/apache/hudi/common/table/string/TestHoodieActiveTimeline.java
 
b/hudi-common/src/test/java/org/apache/hudi/common/table/string/TestHoodieActiveTimeline.java
index c416c21..d77392f 100755
--- 
a/hudi-common/src/test/java/org/apache/hudi/common/table/string/TestHoodieActiveTimeline.java
+++ 
b/hudi-common/src/test/java/org/apache/hudi/common/table/string/TestHoodieActiveTimeline.java
@@ -199,7 +199,6 @@ public class TestHoodieActiveTimeline extends 
HoodieCommonTestHarness {
     checkTimeline.accept(timeline.getCommitsAndCompactionTimeline(),
             Sets.newHashSet(HoodieTimeline.COMMIT_ACTION, 
HoodieTimeline.DELTA_COMMIT_ACTION, HoodieTimeline.COMPACTION_ACTION));
     checkTimeline.accept(timeline.getCommitTimeline(), 
Collections.singleton(HoodieTimeline.COMMIT_ACTION));
-
     checkTimeline.accept(timeline.getDeltaCommitTimeline(), 
Collections.singleton(HoodieTimeline.DELTA_COMMIT_ACTION));
     checkTimeline.accept(timeline.getCleanerTimeline(), 
Collections.singleton(HoodieTimeline.CLEAN_ACTION));
     checkTimeline.accept(timeline.getRollbackTimeline(), 
Collections.singleton(HoodieTimeline.ROLLBACK_ACTION));
diff --git 
a/hudi-common/src/test/java/org/apache/hudi/common/table/view/TestHoodieTableFileSystemView.java
 
b/hudi-common/src/test/java/org/apache/hudi/common/table/view/TestHoodieTableFileSystemView.java
index 2ac6945..fde7126 100644
--- 
a/hudi-common/src/test/java/org/apache/hudi/common/table/view/TestHoodieTableFileSystemView.java
+++ 
b/hudi-common/src/test/java/org/apache/hudi/common/table/view/TestHoodieTableFileSystemView.java
@@ -40,7 +40,6 @@ import org.apache.hudi.common.util.FSUtils;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.collection.Pair;
 
-import com.google.common.collect.Lists;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.log4j.LogManager;
@@ -814,7 +813,7 @@ public class TestHoodieTableFileSystemView extends 
HoodieCommonTestHarness {
     roView.getAllBaseFiles("2016/05/01/");
 
     List<HoodieBaseFile> dataFiles =
-        roView.getLatestBaseFilesInRange(Lists.newArrayList(commitTime2, 
commitTime3)).collect(Collectors.toList());
+        roView.getLatestBaseFilesInRange(Arrays.asList(commitTime2, 
commitTime3)).collect(Collectors.toList());
     assertEquals(isLatestFileSliceOnly ? 2 : 3, dataFiles.size());
     Set<String> filenames = new HashSet<>();
     for (HoodieBaseFile status : dataFiles) {
@@ -828,7 +827,7 @@ public class TestHoodieTableFileSystemView extends 
HoodieCommonTestHarness {
     }
 
     List<FileSlice> slices =
-        rtView.getLatestFileSliceInRange(Lists.newArrayList(commitTime3, 
commitTime4)).collect(Collectors.toList());
+        rtView.getLatestFileSliceInRange(Arrays.asList(commitTime3, 
commitTime4)).collect(Collectors.toList());
     assertEquals(3, slices.size());
     for (FileSlice slice : slices) {
       if (slice.getFileId().equals(fileId1)) {
diff --git 
a/hudi-common/src/test/java/org/apache/hudi/common/util/TestCompactionUtils.java
 
b/hudi-common/src/test/java/org/apache/hudi/common/util/TestCompactionUtils.java
index 94d8b66..2d3cf2e 100644
--- 
a/hudi-common/src/test/java/org/apache/hudi/common/util/TestCompactionUtils.java
+++ 
b/hudi-common/src/test/java/org/apache/hudi/common/util/TestCompactionUtils.java
@@ -159,13 +159,12 @@ public class TestCompactionUtils extends 
HoodieCommonTestHarness {
     Pair<List<Pair<String, FileSlice>>, HoodieCompactionPlan> inputAndPlan = 
buildCompactionPlan();
     HoodieCompactionPlan plan = inputAndPlan.getRight();
     List<HoodieCompactionOperation> originalOps = plan.getOperations();
-    List<HoodieCompactionOperation> regeneratedOps = 
originalOps.stream().map(op -> {
-      // Convert to CompactionOperation
-      return CompactionUtils.buildCompactionOperation(op);
-    }).map(op2 -> {
-      // Convert back to HoodieCompactionOperation and check for equality
-      return CompactionUtils.buildHoodieCompactionOperation(op2);
-    }).collect(Collectors.toList());
+    // Convert to CompactionOperation
+    // Convert back to HoodieCompactionOperation and check for equality
+    List<HoodieCompactionOperation> regeneratedOps = originalOps.stream()
+            .map(CompactionUtils::buildCompactionOperation)
+            .map(CompactionUtils::buildHoodieCompactionOperation)
+            .collect(Collectors.toList());
     Assert.assertTrue("Transformation did get tested", originalOps.size() > 0);
     Assert.assertEquals("All fields set correctly in transformations", 
originalOps, regeneratedOps);
   }
@@ -247,11 +246,9 @@ public class TestCompactionUtils extends 
HoodieCommonTestHarness {
           op.getDataFilePath());
     }
     List<String> paths = slice.getLogFiles().map(l -> 
l.getPath().toString()).collect(Collectors.toList());
-    IntStream.range(0, paths.size()).boxed().forEach(idx -> {
-      Assert.assertEquals("Log File Index " + idx,
-          version == COMPACTION_METADATA_VERSION_1 ? paths.get(idx) : new 
Path(paths.get(idx)).getName(),
-          op.getDeltaFilePaths().get(idx));
-    });
+    IntStream.range(0, paths.size()).boxed().forEach(idx -> 
Assert.assertEquals("Log File Index " + idx,
+        version == COMPACTION_METADATA_VERSION_1 ? paths.get(idx) : new 
Path(paths.get(idx)).getName(),
+        op.getDeltaFilePaths().get(idx)));
     Assert.assertEquals("Metrics set", METRICS, op.getMetrics());
   }
 
diff --git a/hudi-hive/src/main/java/org/apache/hudi/hive/SchemaDifference.java 
b/hudi-hive/src/main/java/org/apache/hudi/hive/SchemaDifference.java
index f892962..57bab64 100644
--- a/hudi-hive/src/main/java/org/apache/hudi/hive/SchemaDifference.java
+++ b/hudi-hive/src/main/java/org/apache/hudi/hive/SchemaDifference.java
@@ -18,15 +18,14 @@
 
 package org.apache.hudi.hive;
 
-import com.google.common.base.Objects;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
 import org.apache.parquet.schema.MessageType;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.StringJoiner;
 
 /**
  * Represents the schema difference between the storage schema and hive table 
schema.
@@ -43,9 +42,9 @@ public class SchemaDifference {
       Map<String, String> updateColumnTypes, Map<String, String> 
addColumnTypes) {
     this.storageSchema = storageSchema;
     this.tableSchema = tableSchema;
-    this.deleteColumns = ImmutableList.copyOf(deleteColumns);
-    this.updateColumnTypes = ImmutableMap.copyOf(updateColumnTypes);
-    this.addColumnTypes = ImmutableMap.copyOf(addColumnTypes);
+    this.deleteColumns = Collections.unmodifiableList(deleteColumns);
+    this.updateColumnTypes = Collections.unmodifiableMap(updateColumnTypes);
+    this.addColumnTypes =  Collections.unmodifiableMap(addColumnTypes);
   }
 
   public List<String> getDeleteColumns() {
@@ -60,12 +59,6 @@ public class SchemaDifference {
     return addColumnTypes;
   }
 
-  @Override
-  public String toString() {
-    return Objects.toStringHelper(this).add("deleteColumns", 
deleteColumns).add("updateColumnTypes", updateColumnTypes)
-        .add("addColumnTypes", addColumnTypes).toString();
-  }
-
   public static Builder newBuilder(MessageType storageSchema, Map<String, 
String> tableSchema) {
     return new Builder(storageSchema, tableSchema);
   }
@@ -74,6 +67,17 @@ public class SchemaDifference {
     return deleteColumns.isEmpty() && updateColumnTypes.isEmpty() && 
addColumnTypes.isEmpty();
   }
 
+  @Override
+  public String toString() {
+    return new StringJoiner(", ", SchemaDifference.class.getSimpleName() + 
"[", "]")
+           .add("storageSchema=" + storageSchema)
+           .add("tableSchema=" + tableSchema)
+           .add("deleteColumns=" + deleteColumns)
+           .add("updateColumnTypes=" + updateColumnTypes)
+           .add("addColumnTypes=" + addColumnTypes)
+           .toString();
+  }
+
   public static class Builder {
 
     private final MessageType storageSchema;
diff --git a/hudi-hive/src/main/java/org/apache/hudi/hive/util/SchemaUtil.java 
b/hudi-hive/src/main/java/org/apache/hudi/hive/util/SchemaUtil.java
index 6ca9957..d21a331 100644
--- a/hudi-hive/src/main/java/org/apache/hudi/hive/util/SchemaUtil.java
+++ b/hudi-hive/src/main/java/org/apache/hudi/hive/util/SchemaUtil.java
@@ -18,8 +18,6 @@
 
 package org.apache.hudi.hive.util;
 
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hudi.common.model.HoodieLogFile;
@@ -42,6 +40,8 @@ import org.apache.parquet.schema.Type;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -67,7 +67,7 @@ public class SchemaUtil {
     }
     LOG.info("Getting schema difference for " + tableSchema + "\r\n\r\n" + 
newTableSchema);
     SchemaDifference.Builder schemaDiffBuilder = 
SchemaDifference.newBuilder(storageSchema, tableSchema);
-    Set<String> tableColumns = Sets.newHashSet();
+    Set<String> tableColumns = new HashSet<>();
 
     for (Map.Entry<String, String> field : tableSchema.entrySet()) {
       String fieldName = field.getKey().toLowerCase();
@@ -140,7 +140,7 @@ public class SchemaUtil {
    * @return : Hive Table schema read from parquet file MAP[String,String]
    */
   public static Map<String, String> 
convertParquetSchemaToHiveSchema(MessageType messageType) throws IOException {
-    Map<String, String> schema = Maps.newLinkedHashMap();
+    Map<String, String> schema = new LinkedHashMap<>();
     List<Type> parquetFields = messageType.getFields();
     for (Type parquetType : parquetFields) {
       StringBuilder result = new StringBuilder();
diff --git a/hudi-hive/src/test/java/org/apache/hudi/hive/TestHiveSyncTool.java 
b/hudi-hive/src/test/java/org/apache/hudi/hive/TestHiveSyncTool.java
index 49692f5..7265f33 100644
--- a/hudi-hive/src/test/java/org/apache/hudi/hive/TestHiveSyncTool.java
+++ b/hudi-hive/src/test/java/org/apache/hudi/hive/TestHiveSyncTool.java
@@ -24,7 +24,6 @@ import org.apache.hudi.hive.HoodieHiveClient.PartitionEvent;
 import org.apache.hudi.hive.HoodieHiveClient.PartitionEvent.PartitionEventType;
 import org.apache.hudi.hive.util.SchemaUtil;
 
-import com.google.common.collect.Lists;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.parquet.schema.MessageType;
 import org.apache.parquet.schema.OriginalType;
@@ -345,7 +344,7 @@ public class TestHiveSyncTool {
     HiveSyncConfig hiveSyncConfig = 
HiveSyncConfig.copy(TestUtil.hiveSyncConfig);
     hiveSyncConfig.partitionValueExtractorClass = 
MultiPartKeysValueExtractor.class.getCanonicalName();
     hiveSyncConfig.tableName = "multi_part_key";
-    hiveSyncConfig.partitionFields = Lists.newArrayList("year", "month", 
"day");
+    hiveSyncConfig.partitionFields = Arrays.asList("year", "month", "day");
     TestUtil.getCreatedTablesSet().add(hiveSyncConfig.databaseName + "." + 
hiveSyncConfig.tableName);
 
     HoodieHiveClient hiveClient = new HoodieHiveClient(hiveSyncConfig, 
TestUtil.getHiveConf(), TestUtil.fileSystem);
diff --git a/hudi-hive/src/test/java/org/apache/hudi/hive/TestUtil.java 
b/hudi-hive/src/test/java/org/apache/hudi/hive/TestUtil.java
index 0cec024..571e949 100644
--- a/hudi-hive/src/test/java/org/apache/hudi/hive/TestUtil.java
+++ b/hudi-hive/src/test/java/org/apache/hudi/hive/TestUtil.java
@@ -43,9 +43,6 @@ import org.apache.hudi.common.util.FileIOUtils;
 import org.apache.hudi.common.util.SchemaTestUtil;
 import org.apache.hudi.hive.util.HiveTestService;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import org.apache.avro.Schema;
 import org.apache.avro.generic.IndexedRecord;
 import org.apache.hadoop.conf.Configuration;
@@ -68,6 +65,10 @@ import java.io.File;
 import java.io.IOException;
 import java.net.URISyntaxException;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -86,7 +87,7 @@ public class TestUtil {
   static HiveSyncConfig hiveSyncConfig;
   private static DateTimeFormatter dtfOut;
   static FileSystem fileSystem;
-  private static Set<String> createdTablesSet = Sets.newHashSet();
+  private static Set<String> createdTablesSet = new HashSet<>();
 
   public static void setUp() throws IOException, InterruptedException {
     if (dfsCluster == null) {
@@ -114,7 +115,7 @@ public class TestUtil {
     hiveSyncConfig.basePath = "/tmp/hdfs/TestHiveSyncTool/";
     hiveSyncConfig.assumeDatePartitioning = true;
     hiveSyncConfig.usePreApacheInputFormat = false;
-    hiveSyncConfig.partitionFields = Lists.newArrayList("datestr");
+    hiveSyncConfig.partitionFields = Collections.singletonList("datestr");
 
     dtfOut = DateTimeFormat.forPattern("yyyy/MM/dd");
 
@@ -249,7 +250,7 @@ public class TestUtil {
 
   private static List<HoodieWriteStat> createTestData(Path partPath, boolean 
isParquetSchemaSimple, String commitTime)
       throws IOException, URISyntaxException {
-    List<HoodieWriteStat> writeStats = Lists.newArrayList();
+    List<HoodieWriteStat> writeStats = new ArrayList<>();
     for (int i = 0; i < 5; i++) {
       // Create 5 files
       String fileId = UUID.randomUUID().toString();
@@ -297,7 +298,7 @@ public class TestUtil {
         .overBaseCommit(dataFile.getCommitTime()).withFs(fileSystem).build();
     List<IndexedRecord> records = (isLogSchemaSimple ? 
SchemaTestUtil.generateTestRecords(0, 100)
         : SchemaTestUtil.generateEvolvedTestRecords(100, 100));
-    Map<HeaderMetadataType, String> header = Maps.newHashMap();
+    Map<HeaderMetadataType, String> header = new HashMap<>(2);
     header.put(HoodieLogBlock.HeaderMetadataType.INSTANT_TIME, 
dataFile.getCommitTime());
     header.put(HoodieLogBlock.HeaderMetadataType.SCHEMA, schema.toString());
     HoodieAvroDataBlock dataBlock = new HoodieAvroDataBlock(records, header);
diff --git 
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/UtilHelpers.java 
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/UtilHelpers.java
index 7812925..27a7f9e 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/UtilHelpers.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/UtilHelpers.java
@@ -68,11 +68,12 @@ import java.sql.SQLException;
 import java.sql.DriverManager;
 import java.sql.Driver;
 import java.util.Arrays;
+import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Properties;
-import java.util.Enumeration;
 
 /**
  * Bunch of helper methods.
@@ -279,14 +280,14 @@ public class UtilHelpers {
       }
     }
 
-    Preconditions.checkNotNull(driver, String.format("Did not find registered 
driver with class %s", driverClass));
+    Objects.requireNonNull(driver, String.format("Did not find registered 
driver with class %s", driverClass));
 
     Properties properties = new Properties();
     properties.putAll(options);
-    Connection connect = null;
+    Connection connect;
     String url = options.get(JDBCOptions.JDBC_URL());
     connect = driver.connect(url, properties);
-    Preconditions.checkNotNull(connect, String.format("The driver could not 
open a JDBC connection. Check the URL: %s", url));
+    Objects.requireNonNull(connect, String.format("The driver could not open a 
JDBC connection. Check the URL: %s", url));
     return connect;
   }
 
