HDFS-13877. HttpFS: Implement GETSNAPSHOTDIFF. Contributed by Siyao Meng.

Signed-off-by: Wei-Chiu Chuang <weic...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/396ce0d9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/396ce0d9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/396ce0d9

Branch: refs/heads/HEAD
Commit: 396ce0d9f470a5e8af03987ad6396d0f08b3d225
Parents: e60b797
Author: Siyao Meng <sm...@cloudera.com>
Authored: Thu Oct 4 15:16:17 2018 -0700
Committer: Wei-Chiu Chuang <weic...@apache.org>
Committed: Thu Oct 4 15:17:47 2018 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hdfs/web/JsonUtilClient.java  |  5 +-
 .../hadoop/fs/http/client/HttpFSFileSystem.java | 17 +++-
 .../hadoop/fs/http/server/FSOperations.java     | 50 ++++++++++
 .../http/server/HttpFSParametersProvider.java   |  3 +
 .../hadoop/fs/http/server/HttpFSServer.java     | 13 +++
 .../fs/http/client/BaseTestHttpFSWith.java      | 99 +++++++++++++++++++-
 .../hadoop/fs/http/server/TestHttpFSServer.java | 96 +++++++++++++++++++
 7 files changed, 280 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/396ce0d9/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
index a685573..95ccb4b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
@@ -68,7 +68,10 @@ import java.util.EnumSet;
 import java.util.List;
 import java.util.Map;
 
-class JsonUtilClient {
+/**
+ * Utility methods used in WebHDFS/HttpFS JSON conversion.
+ */
+public class JsonUtilClient {
   static final DatanodeInfo[] EMPTY_DATANODE_INFO_ARRAY = {};
   static final String UNSUPPPORTED_EXCEPTION_STR =
       UnsupportedOperationException.class.getName();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/396ce0d9/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
index f87a6e3..3904a87 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
@@ -45,6 +45,8 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.FsPermissionExtension;
+import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
+import org.apache.hadoop.hdfs.web.JsonUtilClient;
 import org.apache.hadoop.lib.wsrs.EnumSetParam;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
@@ -232,7 +234,7 @@ public class HttpFSFileSystem extends FileSystem
     SETSTORAGEPOLICY(HTTP_PUT), UNSETSTORAGEPOLICY(HTTP_POST),
     ALLOWSNAPSHOT(HTTP_PUT), DISALLOWSNAPSHOT(HTTP_PUT),
     CREATESNAPSHOT(HTTP_PUT), DELETESNAPSHOT(HTTP_DELETE),
-    RENAMESNAPSHOT(HTTP_PUT);
+    RENAMESNAPSHOT(HTTP_PUT), GETSNAPSHOTDIFF(HTTP_GET);
 
     private String httpMethod;
 
@@ -1467,4 +1469,17 @@ public class HttpFSFileSystem extends FileSystem
     HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
   }
 
+  public SnapshotDiffReport getSnapshotDiffReport(Path path,
+      String snapshotOldName, String snapshotNewName) throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, Operation.GETSNAPSHOTDIFF.toString());
+    params.put(SNAPSHOT_NAME_PARAM, snapshotNewName);
+    params.put(OLD_SNAPSHOT_NAME_PARAM, snapshotOldName);
+    HttpURLConnection conn = getConnection(
+        Operation.GETSNAPSHOTDIFF.getMethod(), params, path, true);
+    HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+    JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
+    return JsonUtilClient.toSnapshotDiffReport(json);
+  }
+
 }
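
For reference, the new HttpFSFileSystem#getSnapshotDiffReport mirrors the
DistributedFileSystem call of the same name. A minimal usage sketch, assuming
"httpfs" is an HttpFSFileSystem already initialized against an HttpFS endpoint
and that the directory and snapshot names below already exist (the usual Path
and SnapshotDiffReport imports are implied):

  // "httpfs", "/tmp/snapdir", "snap1" and "snap2" are placeholders.
  SnapshotDiffReport report = httpfs.getSnapshotDiffReport(
      new Path("/tmp/snapdir"), "snap1", "snap2");
  // toString() renders the same textual report a DistributedFileSystem produces.
  System.out.println(report);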

http://git-wip-us.apache.org/repos/asf/hadoop/blob/396ce0d9/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
index 07fa180..ed7628f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
@@ -36,6 +36,8 @@ import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
+import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
+import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.lib.service.FileSystemAccess;
 import org.apache.hadoop.util.StringUtils;
@@ -1651,4 +1653,52 @@ public class FSOperations {
       return null;
     }
   }
+
+  /**
+   *  Executor that performs a getSnapshotDiff operation.
+   */
+  @InterfaceAudience.Private
+  public static class FSGetSnapshotDiff implements
+      FileSystemAccess.FileSystemExecutor<String> {
+    private Path path;
+    private String oldSnapshotName;
+    private String snapshotName;
+
+    /**
+     * Creates a getSnapshotDiff executor.
+     * @param path directory path of the snapshots to be examined.
+     * @param oldSnapshotName Older snapshot name.
+     * @param snapshotName Newer snapshot name.
+     */
+    public FSGetSnapshotDiff(String path, String oldSnapshotName,
+        String snapshotName) {
+      this.path = new Path(path);
+      this.oldSnapshotName = oldSnapshotName;
+      this.snapshotName = snapshotName;
+    }
+
+    /**
+     * Executes the filesystem operation.
+     * @param fs filesystem instance to use.
+     * @return A serialized JSON string of snapshot diffs.
+     * @throws IOException thrown if an IO error occurred.
+     */
+    @Override
+    public String execute(FileSystem fs) throws IOException {
+      SnapshotDiffReport sdr = null;
+      if (fs instanceof DistributedFileSystem) {
+        DistributedFileSystem dfs = (DistributedFileSystem) fs;
+        sdr = dfs.getSnapshotDiffReport(path, oldSnapshotName, snapshotName);
+      } else {
+        throw new UnsupportedOperationException("getSnapshotDiff is not "
+            + "supported for HttpFs on " + fs.getClass()
+            + ". Please check your fs.defaultFS configuration");
+      }
+      if (sdr != null) {
+        return JsonUtil.toJsonString(sdr);
+      } else {
+        return "";
+      }
+    }
+  }
 }
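
In the server, FSGetSnapshotDiff runs through FileSystemAccess (fsExecute) on
behalf of the authenticated user; for illustration it can also be exercised
directly against a DistributedFileSystem. A minimal sketch, assuming
fs.defaultFS points at an HDFS cluster and the directory and snapshots below
already exist (they are placeholders):

  // Build and run the executor by hand (normally done by HttpFSServer).
  FileSystem fs = FileSystem.get(new Configuration());
  FSOperations.FSGetSnapshotDiff cmd =
      new FSOperations.FSGetSnapshotDiff("/tmp/snapdir", "snap1", "snap2");
  String json = cmd.execute(fs);  // serialized SnapshotDiffReport, or "" if null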

http://git-wip-us.apache.org/repos/asf/hadoop/blob/396ce0d9/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
index 769c33d..5301527 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
@@ -110,6 +110,9 @@ public class HttpFSParametersProvider extends ParametersProvider {
     PARAMS_DEF.put(Operation.RENAMESNAPSHOT,
             new Class[] {OldSnapshotNameParam.class,
                 SnapshotNameParam.class});
+    PARAMS_DEF.put(Operation.GETSNAPSHOTDIFF,
+        new Class[] {OldSnapshotNameParam.class,
+            SnapshotNameParam.class});
   }
 
   public HttpFSParametersProvider() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/396ce0d9/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
index ab94ef9..f510061 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
@@ -366,6 +366,19 @@ public class HttpFSServer {
       response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
       break;
     }
+    case GETSNAPSHOTDIFF: {
+      String oldSnapshotName = params.get(OldSnapshotNameParam.NAME,
+          OldSnapshotNameParam.class);
+      String snapshotName = params.get(SnapshotNameParam.NAME,
+          SnapshotNameParam.class);
+      FSOperations.FSGetSnapshotDiff command =
+          new FSOperations.FSGetSnapshotDiff(path, oldSnapshotName,
+              snapshotName);
+      String js = fsExecute(user, command);
+      AUDIT_LOG.info("[{}]", path);
+      response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+      break;
+    }
     default: {
       throw new IOException(
           MessageFormat.format("Invalid HTTP GET operation [{0}]", 
op.value()));
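
With the server-side wiring above, the operation is reachable through the
standard HttpFS REST endpoint. An illustrative request (host and port are
placeholders; the parameter names match the tests below):

  GET http://<httpfs-host>:14000/webhdfs/v1/tmp/snapdir?op=GETSNAPSHOTDIFF&oldsnapshotname=snap1&snapshotname=snap2&user.name=hdfs

A successful response is the one-line JSON string produced by
JsonUtil.toJsonString(SnapshotDiffReport), which is what the TestHttpFSServer
case below compares against the DistributedFileSystem report.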

http://git-wip-us.apache.org/repos/asf/hadoop/blob/396ce0d9/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
index 6680976..cd9d3b9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
@@ -39,8 +39,10 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
+import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
 import org.apache.hadoop.hdfs.protocol.SnapshotException;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.HFSTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
@@ -1069,7 +1071,7 @@ public abstract class BaseTestHttpFSWith extends HFSTestCase {
     GETTRASHROOT, STORAGEPOLICY, ERASURE_CODING,
     CREATE_SNAPSHOT, RENAME_SNAPSHOT, DELETE_SNAPSHOT,
     ALLOW_SNAPSHOT, DISALLOW_SNAPSHOT, DISALLOW_SNAPSHOT_EXCEPTION,
-    FILE_STATUS_ATTR
+    FILE_STATUS_ATTR, GET_SNAPSHOT_DIFF
   }
 
   private void operation(Operation op) throws Exception {
@@ -1179,6 +1181,10 @@ public abstract class BaseTestHttpFSWith extends HFSTestCase {
     case FILE_STATUS_ATTR:
       testFileStatusAttr();
       break;
+    case GET_SNAPSHOT_DIFF:
+      testGetSnapshotDiff();
+      testGetSnapshotDiffIllegalParam();
+      break;
     }
   }
 
@@ -1432,4 +1438,95 @@ public abstract class BaseTestHttpFSWith extends HFSTestCase {
       fs.delete(path, true);
     }
   }
+
+  private void testGetSnapshotDiff() throws Exception {
+    if (!this.isLocalFS()) {
+      // Create a directory with snapshot allowed
+      Path path = new Path("/tmp/tmp-snap-test");
+      createSnapshotTestsPreconditions(path);
+      // Get the FileSystem instance that's being tested
+      FileSystem fs = this.getHttpFSFileSystem();
+      // Check FileStatus
+      Assert.assertTrue(fs.getFileStatus(path).isSnapshotEnabled());
+      // Create a file and take a snapshot
+      Path file1 = new Path(path, "file1");
+      testCreate(file1, false);
+      fs.createSnapshot(path, "snap1");
+      // Create another file and take a snapshot
+      Path file2 = new Path(path, "file2");
+      testCreate(file2, false);
+      fs.createSnapshot(path, "snap2");
+      // Get snapshot diff
+      SnapshotDiffReport diffReport = null;
+      if (fs instanceof HttpFSFileSystem) {
+        HttpFSFileSystem httpFS = (HttpFSFileSystem) fs;
+        diffReport = httpFS.getSnapshotDiffReport(path, "snap1", "snap2");
+      } else if (fs instanceof WebHdfsFileSystem) {
+        WebHdfsFileSystem webHdfsFileSystem = (WebHdfsFileSystem) fs;
+        diffReport = webHdfsFileSystem.getSnapshotDiffReport(path,
+            "snap1", "snap2");
+      } else {
+        Assert.fail(fs.getClass().getSimpleName() +
+            " doesn't support getSnapshotDiff");
+      }
+      // Verify result with DFS
+      DistributedFileSystem dfs = (DistributedFileSystem)
+          FileSystem.get(path.toUri(), this.getProxiedFSConf());
+      SnapshotDiffReport dfsDiffReport =
+          dfs.getSnapshotDiffReport(path, "snap1", "snap2");
+      Assert.assertEquals(diffReport.toString(), dfsDiffReport.toString());
+      // Cleanup
+      fs.deleteSnapshot(path, "snap2");
+      fs.deleteSnapshot(path, "snap1");
+      fs.delete(path, true);
+    }
+  }
+
+  private void testGetSnapshotDiffIllegalParamCase(FileSystem fs, Path path,
+      String oldsnapshotname, String snapshotname) throws IOException {
+    try {
+      if (fs instanceof HttpFSFileSystem) {
+        HttpFSFileSystem httpFS = (HttpFSFileSystem) fs;
+        httpFS.getSnapshotDiffReport(path, oldsnapshotname, snapshotname);
+      } else if (fs instanceof WebHdfsFileSystem) {
+        WebHdfsFileSystem webHdfsFileSystem = (WebHdfsFileSystem) fs;
+        webHdfsFileSystem.getSnapshotDiffReport(path, oldsnapshotname,
+            snapshotname);
+      } else {
+        Assert.fail(fs.getClass().getSimpleName() +
+            " doesn't support getSnapshotDiff");
+      }
+    } catch (SnapshotException|IllegalArgumentException|RemoteException e) {
+      // Expect SnapshotException, IllegalArgumentException
+      // or RemoteException(IllegalArgumentException)
+      if (e instanceof RemoteException) {
+        // Check RemoteException class name, should be IllegalArgumentException
+        Assert.assertEquals(IllegalArgumentException.class.getName(),
+            ((RemoteException) e).getClassName());
+      }
+      return;
+    }
+    Assert.fail("getSnapshotDiff illegal param didn't throw Exception");
+  }
+
+  private void testGetSnapshotDiffIllegalParam() throws Exception {
+    if (!this.isLocalFS()) {
+      // Create a directory with snapshot allowed
+      Path path = new Path("/tmp/tmp-snap-test");
+      createSnapshotTestsPreconditions(path);
+      // Get the FileSystem instance that's being tested
+      FileSystem fs = this.getHttpFSFileSystem();
+      // Check FileStatus
+      assertTrue("Snapshot should be allowed by DFS",
+          fs.getFileStatus(path).isSnapshotEnabled());
+      Assert.assertTrue(fs.getFileStatus(path).isSnapshotEnabled());
+      // Get snapshot diff
+      testGetSnapshotDiffIllegalParamCase(fs, path, "", "");
+      testGetSnapshotDiffIllegalParamCase(fs, path, "snap1", "");
+      testGetSnapshotDiffIllegalParamCase(fs, path, "", "snap2");
+      testGetSnapshotDiffIllegalParamCase(fs, path, "snap1", "snap2");
+      // Cleanup
+      fs.delete(path, true);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/396ce0d9/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
index 98fc541..f024c95 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
@@ -19,6 +19,8 @@ package org.apache.hadoop.fs.http.server;
 
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
+import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
 import org.apache.hadoop.security.authentication.util.StringSignerSecretProviderCreator;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
@@ -1304,4 +1306,98 @@ public class TestHttpFSServer extends HFSTestCase {
         "LISTSTATUS");
     Assert.assertFalse(result.contains("snap-to-delete"));
   }
+
+  private HttpURLConnection sendRequestToHttpFSServer(String path, String op,
+      String additionalParams) throws Exception {
+    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
+    URL url = new URL(TestJettyHelper.getJettyURL(), MessageFormat.format(
+        "/webhdfs/v1{0}?user.name={1}&op={2}&{3}",
+        path, user, op, additionalParams));
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    conn.setRequestMethod("GET");
+    conn.connect();
+    return conn;
+  }
+
+  private HttpURLConnection sendRequestGetSnapshotDiff(String path,
+      String oldsnapshotname, String snapshotname) throws Exception{
+    return sendRequestToHttpFSServer(path, "GETSNAPSHOTDIFF",
+        MessageFormat.format("oldsnapshotname={0}&snapshotname={1}",
+            oldsnapshotname, snapshotname));
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testGetSnapshotDiff() throws Exception {
+    createHttpFSServer(false, false);
+    // Create a test directory
+    String pathStr = "/tmp/tmp-snap-diff-test";
+    createDirWithHttp(pathStr, "700", null);
+
+    Path path = new Path(pathStr);
+    DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(
+        path.toUri(), TestHdfsHelper.getHdfsConf());
+    // Enable snapshot
+    dfs.allowSnapshot(path);
+    Assert.assertTrue(dfs.getFileStatus(path).isSnapshotEnabled());
+    // Create a file and take a snapshot
+    String file1 = pathStr + "/file1";
+    createWithHttp(file1, null);
+    dfs.createSnapshot(path, "snap1");
+    // Create another file and take a snapshot
+    String file2 = pathStr + "/file2";
+    createWithHttp(file2, null);
+    dfs.createSnapshot(path, "snap2");
+
+    // Send a request with GETSNAPSHOTDIFF API
+    HttpURLConnection conn = sendRequestGetSnapshotDiff(pathStr,
+        "snap1", "snap2");
+    // Should return HTTP_OK
+    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    // Verify the response
+    BufferedReader reader =
+        new BufferedReader(new InputStreamReader(conn.getInputStream()));
+    // The response should be a one-line JSON string.
+    String result = reader.readLine();
+    // Verify the content of diff with DFS API.
+    SnapshotDiffReport dfsDiffReport = dfs.getSnapshotDiffReport(path,
+        "snap1", "snap2");
+    Assert.assertEquals(result, JsonUtil.toJsonString(dfsDiffReport));
+    // Clean up
+    dfs.deleteSnapshot(path, "snap2");
+    dfs.deleteSnapshot(path, "snap1");
+    dfs.delete(path, true);
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testGetSnapshotDiffIllegalParam() throws Exception {
+    createHttpFSServer(false, false);
+    // Create a test directory
+    String pathStr = "/tmp/tmp-snap-diff-exc-test";
+    createDirWithHttp(pathStr, "700", null);
+
+    Path path = new Path(pathStr);
+    DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(
+        path.toUri(), TestHdfsHelper.getHdfsConf());
+    // Enable snapshot
+    dfs.allowSnapshot(path);
+    Assert.assertTrue(dfs.getFileStatus(path).isSnapshotEnabled());
+    // Send requests with GETSNAPSHOTDIFF API
+    // Snapshots snap1 and snap2 are not created, expect failures but not NPE
+    HttpURLConnection conn = sendRequestGetSnapshotDiff(pathStr, "", "");
+    Assert.assertNotEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    conn = sendRequestGetSnapshotDiff(pathStr, "snap1", "");
+    Assert.assertNotEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    conn = sendRequestGetSnapshotDiff(pathStr, "", "snap2");
+    Assert.assertNotEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    conn = sendRequestGetSnapshotDiff(pathStr, "snap1", "snap2");
+    Assert.assertNotEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    // Clean up
+    dfs.delete(path, true);
+  }
 }

