HADOOP-14929. Clean up usage of decodeComponent and use QueryStringDecoder
from Netty. Contributed by Bharat Viswanadham.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1d6f8beb
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1d6f8beb
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1d6f8beb

Branch: refs/heads/YARN-6592
Commit: 1d6f8bebe9d20c958e419c140109e3d9fec8cb46
Parents: 8a1bd9a
Author: Arpit Agarwal <a...@apache.org>
Authored: Fri Nov 10 16:28:12 2017 -0800
Committer: Arpit Agarwal <a...@apache.org>
Committed: Fri Nov 10 16:28:12 2017 -0800

----------------------------------------------------------------------
 .../datanode/web/webhdfs/ParameterParser.java   | 66 ++--------------
 .../web/webhdfs/TestParameterParser.java        | 81 +++++++++++++++++++-
 2 files changed, 87 insertions(+), 60 deletions(-)
----------------------------------------------------------------------
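
A note on the patch below: ParameterParser#createFlag() now decodes the raw
createflag value with Netty's QueryStringDecoder instead of the local
decodeComponent() helper. A minimal sketch of that decoding step, assuming a
Netty 4.1-style QueryStringDecoder whose path() resolves percent-escapes but
leaves '+' untouched; the class and variable names here are illustrative, not
part of the patch:

    // Illustrative sketch only: decode a raw createflag value the way the
    // new createFlag() does, by treating it as a URI path component.
    import io.netty.handler.codec.http.QueryStringDecoder;
    import java.nio.charset.StandardCharsets;

    public class DecodeSketch {
      public static void main(String[] args) {
        String raw = "append,sync_block";        // plain value, no escapes
        String escaped = "append%2Csync_block";  // %2C is an escaped ','
        System.out.println(
            new QueryStringDecoder(raw, StandardCharsets.UTF_8).path());
        System.out.println(
            new QueryStringDecoder(escaped, StandardCharsets.UTF_8).path());
        // Expected (assumption): both lines print "append,sync_block"
      }
    }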


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1d6f8beb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java
index 16380e5..2b3a393 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java
@@ -44,7 +44,6 @@ import org.apache.hadoop.security.token.Token;
 
 import java.io.IOException;
 import java.net.URI;
-import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.util.EnumSet;
 import java.util.List;
@@ -143,9 +142,13 @@ class ParameterParser {
   }
 
   public EnumSet<CreateFlag> createFlag() {
-    String cf =
-        decodeComponent(param(CreateFlagParam.NAME), StandardCharsets.UTF_8);
-
+    String cf = "";
+    if (param(CreateFlagParam.NAME) != null) {
+      QueryStringDecoder decoder = new QueryStringDecoder(
+          param(CreateFlagParam.NAME),
+          StandardCharsets.UTF_8);
+      cf = decoder.path();
+    }
     return new CreateFlagParam(cf).getValue();
   }
 
@@ -159,61 +162,6 @@ class ParameterParser {
   }
 
   /**
-   * The following function behaves exactly the same as netty's
-   * <code>QueryStringDecoder#decodeComponent</code> except that it
-   * does not decode the '+' character as space. WebHDFS takes this scheme
-   * to maintain the backward-compatibility for pre-2.7 releases.
-   */
-  private static String decodeComponent(final String s, final Charset charset) {
-    if (s == null) {
-      return "";
-    }
-    final int size = s.length();
-    boolean modified = false;
-    for (int i = 0; i < size; i++) {
-      final char c = s.charAt(i);
-      if (c == '%' || c == '+') {
-        modified = true;
-        break;
-      }
-    }
-    if (!modified) {
-      return s;
-    }
-    final byte[] buf = new byte[size];
-    int pos = 0;  // position in `buf'.
-    for (int i = 0; i < size; i++) {
-      char c = s.charAt(i);
-      if (c == '%') {
-        if (i == size - 1) {
-          throw new IllegalArgumentException("unterminated escape sequence at" +
-                                                 " end of string: " + s);
-        }
-        c = s.charAt(++i);
-        if (c == '%') {
-          buf[pos++] = '%';  // "%%" -> "%"
-          break;
-        }
-        if (i == size - 1) {
-          throw new IllegalArgumentException("partial escape sequence at end " +
-                                                 "of string: " + s);
-        }
-        c = decodeHexNibble(c);
-        final char c2 = decodeHexNibble(s.charAt(++i));
-        if (c == Character.MAX_VALUE || c2 == Character.MAX_VALUE) {
-          throw new IllegalArgumentException(
-              "invalid escape sequence `%" + s.charAt(i - 1) + s.charAt(
-                  i) + "' at index " + (i - 2) + " of: " + s);
-        }
-        c = (char) (c * 16 + c2);
-        // Fall through.
-      }
-      buf[pos++] = (byte) c;
-    }
-    return new String(buf, 0, pos, charset);
-  }
-
-  /**
    * Helper to decode half of a hexadecimal number from a string.
    * @param c The ASCII character of the hexadecimal number to decode.
    * Must be in the range {@code [0-9a-fA-F]}.
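
Context for the decodeComponent() removal above: the helper existed because
Netty maps '+' to a space when decoding query parameter values, while WebHDFS
keeps '+' literal for backward compatibility with pre-2.7 clients. Decoding
the raw value as a path component preserves that behaviour. A rough sketch of
the contrast, assuming Netty 4.1 semantics; the class name is illustrative:

    // Illustrative sketch only: '+' handling differs between query-parameter
    // decoding and path decoding in Netty's QueryStringDecoder.
    import io.netty.handler.codec.http.QueryStringDecoder;
    import java.nio.charset.StandardCharsets;

    public class PlusHandlingSketch {
      public static void main(String[] args) {
        QueryStringDecoder asQuery =
            new QueryStringDecoder("/webhdfs/v1/f?param=a+b");
        // Query-parameter decoding turns '+' into a space: expected "a b"
        System.out.println(asQuery.parameters().get("param").get(0));

        QueryStringDecoder asPath =
            new QueryStringDecoder("a+b", StandardCharsets.UTF_8);
        // Path decoding keeps '+' literal: expected "a+b"
        System.out.println(asPath.path());
      }
    }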

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1d6f8beb/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/TestParameterParser.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/TestParameterParser.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/TestParameterParser.java
index 59fd18f..235d051 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/TestParameterParser.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/TestParameterParser.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.server.datanode.web.webhdfs;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.HAUtilClient;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
@@ -25,14 +26,17 @@ import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.hdfs.web.resources.NamenodeAddressParam;
 import org.apache.hadoop.hdfs.web.resources.OffsetParam;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Assert;
 import org.junit.Test;
 
+import static org.junit.Assert.fail;
+
 import io.netty.handler.codec.http.QueryStringDecoder;
 
 import java.io.IOException;
+import java.util.EnumSet;
 
-import static org.mockito.Mockito.mock;
 
 public class TestParameterParser {
   private static final String LOGICAL_NAME = "minidfs";
@@ -64,6 +68,81 @@ public class TestParameterParser {
   }
 
   @Test
+  public void testCreateFlag() {
+    final String path = "/test1?createflag=append,sync_block";
+    Configuration conf = new Configuration();
+    QueryStringDecoder decoder = new QueryStringDecoder(
+        WebHdfsHandler.WEBHDFS_PREFIX + path);
+    ParameterParser testParser = new ParameterParser(decoder, conf);
+    EnumSet<CreateFlag> actual = testParser.createFlag();
+    EnumSet<CreateFlag> expected = EnumSet.of(CreateFlag.APPEND,
+        CreateFlag.SYNC_BLOCK);
+    Assert.assertEquals(expected.toString(), actual.toString());
+
+
+    final String path1 = "/test1?createflag=append";
+    decoder = new QueryStringDecoder(
+        WebHdfsHandler.WEBHDFS_PREFIX + path1);
+    testParser = new ParameterParser(decoder, conf);
+
+    actual = testParser.createFlag();
+    expected = EnumSet.of(CreateFlag.APPEND);
+    Assert.assertEquals(expected, actual);
+
+    final String path2 = "/test1";
+    decoder = new QueryStringDecoder(
+        WebHdfsHandler.WEBHDFS_PREFIX + path2);
+    testParser = new ParameterParser(decoder, conf);
+    actual = testParser.createFlag();
+    Assert.assertEquals(0, actual.size());
+
+    final String path3 = "/test1?createflag=create,overwrite";
+    decoder = new QueryStringDecoder(
+        WebHdfsHandler.WEBHDFS_PREFIX + path3);
+    testParser = new ParameterParser(decoder, conf);
+    actual = testParser.createFlag();
+    expected = EnumSet.of(CreateFlag.CREATE, CreateFlag
+        .OVERWRITE);
+    Assert.assertEquals(expected.toString(), actual.toString());
+
+
+    final String path4 = "/test1?createflag=";
+    decoder = new QueryStringDecoder(
+        WebHdfsHandler.WEBHDFS_PREFIX + path4);
+    testParser = new ParameterParser(decoder, conf);
+    actual = testParser.createFlag();
+    Assert.assertEquals(0, actual.size());
+
+    //Incorrect value passed to createflag
+    try {
+      final String path5 = "/test1?createflag=overwrite,";
+      decoder = new QueryStringDecoder(
+          WebHdfsHandler.WEBHDFS_PREFIX + path5);
+      testParser = new ParameterParser(decoder, conf);
+      actual = testParser.createFlag();
+      fail("It should throw Illegal Argument Exception");
+    } catch (Exception e) {
+      GenericTestUtils
+          .assertExceptionContains("No enum constant", e);
+    }
+
+    //Incorrect value passed to createflag
+    try {
+      final String path6 = "/test1?createflag=,";
+      decoder = new QueryStringDecoder(
+          WebHdfsHandler.WEBHDFS_PREFIX + path6);
+      testParser = new ParameterParser(decoder, conf);
+      actual = testParser.createFlag();
+      fail("It should throw Illegal Argument Exception");
+    } catch (Exception e) {
+      GenericTestUtils
+          .assertExceptionContains("No enum constant", e);
+    }
+
+
+  }
+
+  @Test
   public void testOffset() throws IOException {
     final long X = 42;
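
A usage note on the new tests: after decoding, the createflag string is handed
to CreateFlagParam, which parses the comma-separated list into an
EnumSet<CreateFlag> and rejects malformed input with the "No enum constant"
error asserted above. A sketch of that parsing in isolation, assuming
org.apache.hadoop.hdfs.web.resources.CreateFlagParam is on the classpath; the
sketch's class name is illustrative:

    // Illustrative sketch only: CreateFlagParam turns the decoded createflag
    // string into an EnumSet<CreateFlag>, mirroring ParameterParser#createFlag().
    import java.util.EnumSet;
    import org.apache.hadoop.fs.CreateFlag;
    import org.apache.hadoop.hdfs.web.resources.CreateFlagParam;

    public class CreateFlagSketch {
      public static void main(String[] args) {
        EnumSet<CreateFlag> flags =
            new CreateFlagParam("append,sync_block").getValue();
        System.out.println(flags);                // expected: [APPEND, SYNC_BLOCK]
        System.out.println(new CreateFlagParam("").getValue().size()); // expected: 0
        try {
          new CreateFlagParam("overwrite,").getValue();  // trailing comma
        } catch (IllegalArgumentException e) {
          // expected to mention "No enum constant", as the tests assert
          System.out.println(e.getMessage());
        }
      }
    }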
 

