Repository: hive
Updated Branches:
  refs/heads/master 12041d39f -> ccc82cfb0


HIVE-18888: Replace synchronizedMap with ConcurrentHashMap (Alexander Kolbasov, 
reviewed by Peter Vary, Sahil Takiar)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ccc82cfb
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ccc82cfb
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ccc82cfb

Branch: refs/heads/master
Commit: ccc82cfb0dc1d7c5a0d4a193eb0da434ce8a0cfb
Parents: 12041d3
Author: Alexander Kolbasov <ak...@cloudera.com>
Authored: Tue Mar 13 09:37:10 2018 -0700
Committer: Sahil Takiar <stak...@cloudera.com>
Committed: Tue Mar 13 09:37:10 2018 -0700

----------------------------------------------------------------------
 .../ql/exec/tez/DynamicValueRegistryTez.java     | 19 +++++++------------
 .../org/apache/hadoop/hive/ql/metadata/Hive.java | 14 ++------------
 .../hadoop/hive/metastore/HiveMetaStore.java     |  7 +++----
 3 files changed, 12 insertions(+), 28 deletions(-)
----------------------------------------------------------------------
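
For background, the key behavioral difference behind this change: Collections.synchronizedMap makes each individual call atomic, but a compound check-then-act sequence is still two separate locked calls, so callers must hold the map's lock themselves across both. ConcurrentHashMap provides such compound operations as single atomic calls with finer-grained internal locking. A minimal, self-contained sketch (illustrative only, not code from this commit):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class MapAtomicityDemo {
        public static void main(String[] args) {
            // synchronizedMap: each call locks the map, but check-then-act
            // spans two calls, so the caller must hold the lock across both.
            Map<String, Integer> syncMap = Collections.synchronizedMap(new HashMap<>());
            synchronized (syncMap) {
                if (!syncMap.containsKey("k")) {
                    syncMap.put("k", 1);
                }
            }

            // ConcurrentHashMap: the same compound operation is one atomic
            // call, with no external locking required.
            Map<String, Integer> concMap = new ConcurrentHashMap<>();
            concMap.putIfAbsent("k", 1);
            System.out.println(syncMap.get("k") + " " + concMap.get("k")); // 1 1
        }
    }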


http://git-wip-us.apache.org/repos/asf/hive/blob/ccc82cfb/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicValueRegistryTez.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicValueRegistryTez.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicValueRegistryTez.java
index 0bed22a..ec1e84b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicValueRegistryTez.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicValueRegistryTez.java
@@ -18,35 +18,30 @@
 
 package org.apache.hadoop.hive.ql.exec.tez;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.exec.DynamicValueRegistry;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
-import org.apache.hadoop.hive.ql.exec.DynamicValueRegistry;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.RuntimeValuesInfo;
 import org.apache.hadoop.hive.ql.plan.BaseWork;
 import org.apache.hadoop.hive.ql.plan.DynamicValue.NoDynamicValuesException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.tez.runtime.api.Input;
 import org.apache.tez.runtime.api.LogicalInput;
 import org.apache.tez.runtime.api.ProcessorContext;
 import org.apache.tez.runtime.library.api.KeyValueReader;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
 public class DynamicValueRegistryTez implements DynamicValueRegistry {
   private static final Logger LOG = LoggerFactory.getLogger(DynamicValueRegistryTez.class);
 
@@ -66,7 +61,7 @@ public class DynamicValueRegistryTez implements DynamicValueRegistry {
     }
   }
 
-  protected Map<String, Object> values = Collections.synchronizedMap(new HashMap<String, Object>());
+  protected Map<String, Object> values = new ConcurrentHashMap<>();
 
   public DynamicValueRegistryTez() {
   }
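
One caveat that applies to this kind of swap in general (an observation, not something the commit changes): ConcurrentHashMap rejects null keys and values, while a synchronized HashMap accepts them, so the replacement is only safe when null is never stored. A quick sketch:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class NullValueDemo {
        public static void main(String[] args) {
            Map<String, Object> syncMap = Collections.synchronizedMap(new HashMap<>());
            syncMap.put("key", null); // fine: HashMap permits null values

            Map<String, Object> concMap = new ConcurrentHashMap<>();
            try {
                concMap.put("key", null); // throws: ConcurrentHashMap forbids nulls
            } catch (NullPointerException e) {
                System.out.println("ConcurrentHashMap rejects null values");
            }
        }
    }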

http://git-wip-us.apache.org/repos/asf/hive/blob/ccc82cfb/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 6b635fc..5a7e297 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -37,7 +37,6 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.LinkedList;
@@ -174,13 +173,6 @@ import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Splitter;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
-
 /**
  * This class has functions that implement meta data/DDL operations using calls
  * to the metastore.
@@ -2064,7 +2056,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
     try {
       // for each dynamically created DP directory, construct a full partition spec
       // and load the partition based on that
-      final Map<Long, RawStore> rawStoreMap = Collections.synchronizedMap(new HashMap<Long, RawStore>());
+      final Map<Long, RawStore> rawStoreMap = new ConcurrentHashMap<>();
       for(final Path partPath : validPartitions) {
         // generate a full partition specification
         final LinkedHashMap<String, String> fullPartSpec = Maps.newLinkedHashMap(partSpec);
@@ -2123,9 +2115,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
         future.get();
       }
 
-      for (RawStore rs : rawStoreMap.values()) {
-        rs.shutdown();
-      }
+      rawStoreMap.forEach((k, rs) -> rs.shutdown());
     } catch (InterruptedException | ExecutionException e) {
       LOG.debug("Cancelling " + futures.size() + " dynamic loading tasks");
       //cancel other futures
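
The forEach rewrite above is more than a style cleanup: iterating over the values() view of a synchronizedMap is only safe if the caller synchronizes on the map for the whole traversal (as the Collections.synchronizedMap javadoc requires), whereas ConcurrentHashMap's forEach is weakly consistent and needs no external lock. A small illustrative sketch (made-up data, not code from the commit):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class IterationDemo {
        public static void main(String[] args) {
            // synchronizedMap: traversal must be guarded manually, or concurrent
            // writers can cause a ConcurrentModificationException.
            Map<Long, String> syncMap = Collections.synchronizedMap(new HashMap<>());
            syncMap.put(1L, "store-1");
            synchronized (syncMap) {
                for (String v : syncMap.values()) {
                    System.out.println("shutdown " + v);
                }
            }

            // ConcurrentHashMap: forEach is weakly consistent and never throws
            // ConcurrentModificationException, so no external lock is needed.
            Map<Long, String> concMap = new ConcurrentHashMap<>();
            concMap.put(1L, "store-1");
            concMap.forEach((k, v) -> System.out.println("shutdown " + v));
        }
    }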

http://git-wip-us.apache.org/repos/asf/hive/blob/ccc82cfb/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index db09407..66353e7 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -43,6 +43,7 @@ import java.util.Objects;
 import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -2867,8 +2868,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       logInfo("add_partitions");
       boolean success = false;
       // Ensures that the list doesn't have dups, and keeps track of directories we have created.
-      final Map<PartValEqWrapper, Boolean> addedPartitions =
-          Collections.synchronizedMap(new HashMap<PartValEqWrapper, Boolean>());
+      final Map<PartValEqWrapper, Boolean> addedPartitions = new ConcurrentHashMap<>();
       final List<Partition> newParts = new ArrayList<>();
       final List<Partition> existingParts = new ArrayList<>();
       Table tbl = null;
@@ -3078,8 +3078,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
         throws TException {
       boolean success = false;
       // Ensures that the list doesn't have dups, and keeps track of directories we have created.
-      final Map<PartValEqWrapperLite, Boolean> addedPartitions =
-          Collections.synchronizedMap(new HashMap<PartValEqWrapperLite, Boolean>());
+      final Map<PartValEqWrapperLite, Boolean> addedPartitions = new ConcurrentHashMap<>();
       PartitionSpecProxy partitionSpecProxy = PartitionSpecProxy.Factory.get(partSpecs);
       final PartitionSpecProxy.PartitionIterator partitionIterator = partitionSpecProxy
           .getPartitionIterator();
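
Because the addedPartitions maps are populated from parallel partition-creation tasks, the switch also enables atomic duplicate detection without a lock. A hypothetical sketch of that pattern (the key type and values here are invented for illustration, not the code in this commit):

    import java.util.concurrent.ConcurrentHashMap;

    public class DuplicateCheckDemo {
        public static void main(String[] args) {
            // Value records whether a new directory was created for the partition.
            ConcurrentHashMap<String, Boolean> addedPartitions = new ConcurrentHashMap<>();

            // putIfAbsent returns the previous mapping, or null if the key was
            // new, so a duplicate submission is detected in one atomic step.
            Boolean previous = addedPartitions.putIfAbsent("ds=2018-03-13", Boolean.TRUE);
            if (previous != null) {
                throw new IllegalStateException("duplicate partition: ds=2018-03-13");
            }
            System.out.println("registered partitions: " + addedPartitions.keySet());
        }
    }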
