This is an automated email from the ASF dual-hosted git repository.

pifta pushed a commit to branch HDDS-5447-httpfs
in repository https://gitbox.apache.org/repos/asf/ozone.git


The following commit(s) were added to refs/heads/HDDS-5447-httpfs by this push:
     new 8eabe29  HDDS-5826 [HTTPFSGW] Remove or replace Hadoop shaded guava dependencies. (#2735)
8eabe29 is described below

commit 8eabe292f570dfd602df80efdf3338dc7865c480
Author: dombizita <[email protected]>
AuthorDate: Mon Oct 18 09:37:13 2021 +0200

    HDDS-5826 [HTTPFSGW] Remove or replace Hadoop shaded guava dependencies. (#2735)
---
 .../hadoop/fs/http/client/HttpFSFileSystem.java       | 19 ++++++++-----------
 .../apache/hadoop/fs/http/server/HttpFSServer.java    |  4 ++--
 .../org/apache/hadoop/fs/http/server/JsonUtil.java    |  3 +--
 .../org/apache/hadoop/lib/servlet/ServerWebApp.java   |  2 +-
 .../java/org/apache/hadoop/lib/wsrs/Parameters.java   |  5 ++---
 .../apache/hadoop/lib/wsrs/ParametersProvider.java    |  4 ++--
 6 files changed, 16 insertions(+), 21 deletions(-)

diff --git a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
index 7590178..dd213f3 100644
--- a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
+++ b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
@@ -24,7 +24,6 @@ import java.util.EnumSet;
 import java.util.List;
 
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonPathCapabilities;
@@ -66,15 +65,12 @@ import org.apache.hadoop.util.HttpExceptionUtils;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.ratis.thirdparty.com.google.common.base.Preconditions;
 import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
 import org.json.simple.parser.JSONParser;
 import org.json.simple.parser.ParseException;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
-import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
-
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.DataInput;
@@ -88,6 +84,7 @@ import java.net.HttpURLConnection;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.text.MessageFormat;
 import java.util.HashMap;
@@ -802,7 +799,7 @@ public class HttpFSFileSystem extends FileSystem
     Map<String, String> params = new HashMap<String, String>();
     params.put(OP_PARAM, Operation.LISTSTATUS_BATCH.toString());
     if (token != null) {
-      params.put(START_AFTER_PARAM, new String(token, Charsets.UTF_8));
+      params.put(START_AFTER_PARAM, new String(token, StandardCharsets.UTF_8));
     }
     HttpURLConnection conn = getConnection(
         Operation.LISTSTATUS_BATCH.getMethod(),
@@ -817,7 +814,7 @@ public class HttpFSFileSystem extends FileSystem
     byte[] newToken = null;
     if (statuses.length > 0) {
       newToken = statuses[statuses.length - 1].getPath().getName().toString()
-          .getBytes(Charsets.UTF_8);
+          .getBytes(StandardCharsets.UTF_8);
     }
     // Parse the remainingEntries boolean into hasMore
     final long remainingEntries = (Long) listing.get(REMAINING_ENTRIES_JSON);
@@ -1402,13 +1399,13 @@ public class HttpFSFileSystem extends FileSystem
     JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
     Map<String, byte[]> xAttrs = createXAttrMap(
         (JSONArray) json.get(XATTRS_JSON));
-    return xAttrs != null ? xAttrs.get(name) : null;
+    return xAttrs.get(name);
   }
 
   /** Convert xAttrs json to xAttrs map. */
   private Map<String, byte[]> createXAttrMap(JSONArray jsonArray) 
       throws IOException {
-    Map<String, byte[]> xAttrs = Maps.newHashMap();
+    Map<String, byte[]> xAttrs = new HashMap<>();
     for (Object obj : jsonArray) {
       JSONObject jsonObj = (JSONObject) obj;
       final String name = (String)jsonObj.get(XATTR_NAME_JSON);
@@ -1427,7 +1424,7 @@ public class HttpFSFileSystem extends FileSystem
     JSONArray jsonArray;
     try {
       jsonArray = (JSONArray)parser.parse(xattrNamesStr);
-      List<String> names = Lists.newArrayListWithCapacity(jsonArray.size());
+      List<String> names = new ArrayList<>(jsonArray.size());
       for (Object name : jsonArray) {
         names.add((String) name);
       }
@@ -1455,7 +1452,7 @@ public class HttpFSFileSystem extends FileSystem
         "XAttr names cannot be null or empty.");
     Map<String, String> params = new HashMap<String, String>();
     params.put(OP_PARAM, Operation.GETXATTRS.toString());
-    Map<String, List<String>> multiValuedParams = Maps.newHashMap();
+    Map<String, List<String>> multiValuedParams = new HashMap<>();
     multiValuedParams.put(XATTR_NAME_PARAM, names);
     HttpURLConnection conn = getConnection(Operation.GETXATTRS.getMethod(),
         params, multiValuedParams, f, true);
diff --git a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
index de21b45..2b34a14 100644
--- a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
+++ b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.fs.http.server;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -89,6 +88,7 @@ import javax.ws.rs.core.UriInfo;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.security.AccessControlException;
 import java.security.PrivilegedExceptionAction;
 import java.text.MessageFormat;
@@ -452,7 +452,7 @@ public class HttpFSServer {
         HttpFSParametersProvider.StartAfterParam.class);
     byte[] token = HttpFSUtils.EMPTY_BYTES;
     if (startAfter != null) {
-      token = startAfter.getBytes(Charsets.UTF_8);
+      token = startAfter.getBytes(StandardCharsets.UTF_8);
     }
     FSOperations.FSListStatusBatch command = new FSOperations
         .FSListStatusBatch(path, token);
diff --git a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/fs/http/server/JsonUtil.java b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/fs/http/server/JsonUtil.java
index a14c44d..219e3bb 100644
--- a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/fs/http/server/JsonUtil.java
+++ b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/fs/http/server/JsonUtil.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.hdfs.protocol.*;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.apache.hadoop.util.StringUtils;
 
 import java.io.IOException;
@@ -366,7 +365,7 @@ final class JsonUtil {
   
   public static String toJsonString(final List<XAttr> xAttrs)
       throws IOException {
-    final List<String> names = Lists.newArrayListWithCapacity(xAttrs.size());
+    final List<String> names = new ArrayList<>(xAttrs.size());
     for (XAttr xAttr : xAttrs) {
       names.add(XAttrHelper.getPrefixedName(xAttr));
     }
diff --git a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
index efccee7..7a40325 100644
--- a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
+++ b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java
@@ -18,11 +18,11 @@
 
 package org.apache.hadoop.lib.servlet;
 
-import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.lib.server.Server;
 import org.apache.hadoop.lib.server.ServerException;
+import org.apache.ratis.thirdparty.com.google.common.annotations.VisibleForTesting;
 
 import javax.servlet.ServletContextEvent;
 import javax.servlet.ServletContextListener;
diff --git a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java
index 05ee15b..989b507 100644
--- a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java
+++ b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java
@@ -19,8 +19,7 @@ package org.apache.hadoop.lib.wsrs;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
-
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
@@ -69,7 +68,7 @@ public class Parameters {
   public <V, T extends Param<V>> List<V> getValues(String name,
                                                    Class<T> klass) {
     List<Param<?>> multiParams = (List<Param<?>>)params.get(name);
-    List<V> values = Lists.newArrayList();
+    List<V> values = new ArrayList<>();
     if (multiParams != null) {
       for (Param<?> param : multiParams) {
         V value = ((T) param).value();
diff --git a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
index dc3f1a1..3ddac81 100644
--- a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
+++ b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.lib.wsrs;
 
-import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import com.sun.jersey.api.core.HttpContext;
 import com.sun.jersey.core.spi.component.ComponentContext;
 import com.sun.jersey.core.spi.component.ComponentScope;
@@ -32,6 +31,7 @@ import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MultivaluedMap;
 import java.lang.reflect.Type;
 import java.text.MessageFormat;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -82,7 +82,7 @@ public class ParametersProvider
     }
     for (Class<Param<?>> paramClass : paramsDef.get(op)) {
       Param<?> param = newParam(paramClass);
-      List<Param<?>> paramList = Lists.newArrayList();
+      List<Param<?>> paramList = new ArrayList<>();
       List<String> ps = queryString.get(param.getName());
       if (ps != null) {
         for (String p : ps) {

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to