[ 
https://issues.apache.org/jira/browse/KNOX-2240?focusedWorklogId=407760&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-407760
 ]

ASF GitHub Bot logged work on KNOX-2240:
----------------------------------------

                Author: ASF GitHub Bot
            Created on: 23/Mar/20 08:20
            Start Date: 23/Mar/20 08:20
    Worklog Time Spent: 10m 
      Work Description: smolnar82 commented on pull request #296: KNOX-2240 - KnoxShell Custom Command for WEBHDFS Use
URL: https://github.com/apache/knox/pull/296#discussion_r396267407
 
 

 ##########
 File path: gateway-shell/src/main/java/org/apache/knox/gateway/shell/commands/WebHDFSCommand.java
 ##########
 @@ -0,0 +1,376 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.shell.commands;
+
+import java.io.Console;
+import java.io.File;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.TimeZone;
+
+import org.apache.knox.gateway.shell.CredentialCollectionException;
+import org.apache.knox.gateway.shell.CredentialCollector;
+import org.apache.knox.gateway.shell.KnoxSession;
+import org.apache.knox.gateway.shell.KnoxShellException;
+import org.apache.knox.gateway.shell.hdfs.Hdfs;
+import org.apache.knox.gateway.shell.hdfs.Status.Response;
+import org.apache.knox.gateway.shell.table.KnoxShellTable;
+import org.apache.knox.gateway.util.JsonUtils;
+import org.codehaus.groovy.tools.shell.Groovysh;
+
+public class WebHDFSCommand extends AbstractKnoxShellCommand {
+  private static final String KNOXMOUNTPOINTS = "__knoxmountpoints";
+  private Map<String, KnoxSession> sessions = new HashMap<>();
+
+  public WebHDFSCommand(Groovysh shell) {
+    super(shell, ":filesystem", ":fs");
+  }
+
+  @Override
+  public String getUsage() {
+    String usage = "Usage: \n" +
+                   "  :fs ls {target-path} \n" +
+                   "  :fs cat {target-path} \n" +
+                   "  :fs get {from-path} {to-path} \n" +
+                   "  :fs put {from-path} {tp-path} \n" +
+                   "  :fs rm {target-path} \n" +
+                   "  :fs mkdir {dir-path} \n";
+    return usage;
+  }
+
+  @Override
+  public Object execute(List<String> args) {
+    Map<String, String> mounts = getMountPoints();
+    if (args.isEmpty()) {
+      args.add("ls");
+    }
+    if (args.get(0).equalsIgnoreCase("mount")) {
+      String url = args.get(1);
+      String mountPoint = args.get(2);
+      KnoxSession session = establishSession(mountPoint, url);
+      if (session != null) {
+        mounts.put(mountPoint, url);
+        KnoxSession.persistMountPoints(mounts);
+        return url + " mounted as " + mountPoint;
+      }
+
+      return "Failed to mount " + url + " as " + mountPoint;
+    }
+    else if (args.get(0).equalsIgnoreCase("unmount")) {
+      String mountPoint = args.get(1);
+      sessions.remove(mountPoint);
+      mounts.remove(mountPoint);
+      KnoxSession.persistMountPoints(mounts);
+    }
+    else if (args.get(0).equalsIgnoreCase("mounts")) {
+      KnoxShellTable table = new KnoxShellTable();
+      table.header("Mount Point").header("Topology URL");
+      for (String mountPoint : mounts.keySet()) {
+        table.row().value(mountPoint).value(mounts.get(mountPoint));
+      }
+      return table;
+    }
+    else if (args.get(0).equalsIgnoreCase("ls")) {
+      String path = args.get(1);
+      try {
+        String directory;
+        String mountPoint = determineMountPoint(path);
+        if (mountPoint != null) {
+          KnoxSession session = getSessionForMountPoint(mounts, mountPoint);
+          if (session != null) {
+            directory = determineTargetPath(path, mountPoint);
+            String json = Hdfs.ls(session).dir(directory).now().getString();
+            Map<String,HashMap<String, ArrayList<HashMap<String, String>>>> map =
+                JsonUtils.getFileStatusesAsMap(json);
+            if (map != null) {
+              ArrayList<HashMap<String, String>> list = map.get("FileStatuses").get("FileStatus");
+              KnoxShellTable table = buildTableFromListStatus(directory, list);
+              return table;
+            }
+          }
+          else {
+            return "No session established for mountPoint: " + mountPoint + " 
Use :fs mount {topology-url} {mountpoint-name}";
+          }
+        }
+        else {
+          System.out.println("No mountpoint found. Use ':fs mount 
{topologyURL} {mountpoint}'.");
+        }
+      } catch (KnoxShellException | IOException e) {
+        e.printStackTrace();
+      }
+    }
+    else if (args.get(0).equalsIgnoreCase("put")) {
+      // Hdfs.put( session ).file( dataFile ).to( dataDir + "/" + dataFile ).now()
+      // :fs put from-path to-path
+      String localFile = args.get(1);
+      String path = args.get(2);
+
+      String mountPoint = determineMountPoint(path);
+      KnoxSession session = getSessionForMountPoint(mounts, mountPoint);
+      if (session != null) {
+        String targetPath = determineTargetPath(path, mountPoint);
+        try {
+          boolean overwrite = false;
+          try {
+            Response response = Hdfs.status(session).file(targetPath).now();
+            if (response.exists()) {
+              if (collectClearInput(targetPath + " already exists would you like to overwrite (Y/n)").equalsIgnoreCase("y")) {
+                overwrite = true;
+              }
+            }
+          } catch (KnoxShellException e) {
+            // NOP
+          }
+          int permission = 755;
+          if (args.size() >= 4) {
+            permission = Integer.parseInt(args.get(3));
+          }
+          Hdfs.put(session).file(localFile).to(targetPath).overwrite(overwrite).permission(permission).now().getString();
+        } catch (IOException e) {
+          e.printStackTrace();
+        }
+      }
+      else {
+        return "No session established for mountPoint: " + mountPoint + " Use 
:fs mount {topology-url} {mountpoint-name}";
+      }
+    }
+    else if (args.get(0).equalsIgnoreCase("rm")) {
+      // Hdfs.rm( session ).file( dataFile ).now()
+      // :fs rm target-path
+      String path = args.get(1);
+
+      String mountPoint = determineMountPoint(path);
+      KnoxSession session = getSessionForMountPoint(mounts, mountPoint);
+      if (session != null) {
+        String targetPath = determineTargetPath(path, mountPoint);
+        try {
+          Hdfs.rm(session).file(targetPath).now().getString();
+        } catch (KnoxShellException | IOException e) {
+          e.printStackTrace();
+        }
+      }
+      else {
+        return "No session established for mountPoint: " + mountPoint + " Use 
:fs mount {topology-url} {mountpoint-name}";
+      }
+    }
+    else if (args.get(0).equalsIgnoreCase("cat")) {
+      // println Hdfs.get( session ).from( dataDir + "/" + dataFile ).now().string
+      // :fs cat target-path
+      String path = args.get(1);
+
+      String mountPoint = determineMountPoint(path);
+      KnoxSession session = getSessionForMountPoint(mounts, mountPoint);
+      if (session != null) {
+        String targetPath = determineTargetPath(path, mountPoint);
+        try {
+          String contents = Hdfs.get(session).from(targetPath).now().getString();
+          return contents;
+        } catch (KnoxShellException | IOException e) {
+          e.printStackTrace();
+        }
+      }
+      else {
+        return "No session established for mountPoint: " + mountPoint + " Use 
:fs mount {topology-url} {mountpoint-name}";
+      }
+    }
+    else if (args.get(0).equalsIgnoreCase("mkdir")) {
+      // println Hdfs.mkdir( session ).dir( directoryPath ).perm( "777" ).now().string
+      // :fs mkdir target-path [perms]
+      String path = args.get(1);
+      String perms = null;
+      if (args.size() == 3) {
+        perms = args.get(2);
+      }
+
+      String mountPoint = determineMountPoint(path);
+      KnoxSession session = getSessionForMountPoint(mounts, mountPoint);
+      if (session != null) {
+        String targetPath = determineTargetPath(path, mountPoint);
+        try {
+          if (perms != null) {
+            Hdfs.mkdir(session).dir(targetPath).perm(perms).now().getString();
+          }
+          else {
+            Hdfs.mkdir(session).dir(targetPath).now().getString();
+          }
+          return "Successfully created directory: " + targetPath;
+        } catch (KnoxShellException | IOException e) {
+          e.printStackTrace();
+        }
+      }
+      else {
+        return "No session established for mountPoint: " + mountPoint + " Use 
:fs mount {topology-url} {mountpoint-name}";
+      }
+    }
+    else if (args.get(0).equalsIgnoreCase("get")) {
+      // println Hdfs.get( session ).from( dataDir + "/" + dataFile ).now().string
+      // :fs get from-path [to-path]
+      String path = args.get(1);
+
+      String mountPoint = determineMountPoint(path);
+      KnoxSession session = getSessionForMountPoint(mounts, mountPoint);
+      if (session != null) {
+        String from = determineTargetPath(path, mountPoint);
+        String to = null;
+        if (args.size() > 2) {
+          to = args.get(2);
+        }
+        else {
+          to = System.getProperty("user.home") + File.separator +
+              path.substring(path.lastIndexOf(File.separator));
+        }
+        try {
+          Hdfs.get(sessions.get(mountPoint)).from(from).file(to).now().getString();
+        } catch (KnoxShellException | IOException e) {
+          e.printStackTrace();
+        }
+      }
+      else {
+        return "No session established for mountPoint: " + mountPoint + " Use 
:fs mount {topology-url} {mountpoint-name}";
+      }
+    }
+    else {
+      System.out.println("Unknown filesystem command");
+      System.out.println(getUsage());
+    }
+    return "";
+  }
+
+  private KnoxSession getSessionForMountPoint(Map<String, String> mounts, String mountPoint) {
+    KnoxSession session = sessions.get(mountPoint);
+    if (session == null) {
+      String url = mounts.get(mountPoint);
+      if (url != null) {
+        session = establishSession(mountPoint, url);
+      }
+    }
+    return session;
+  }
+
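+  // Prompts for credentials and logs in to the given Knox topology URL, caching the resulting KnoxSession for the mount point.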
+  private KnoxSession establishSession(String mountPoint, String url) {
+    CredentialCollector dlg;
+    try {
+      dlg = login();
+    } catch (CredentialCollectionException e) {
+      e.printStackTrace();
+      return null;
+    }
+    String username = dlg.name();
+    String password = new String(dlg.chars());
+    KnoxSession session = null;
+    try {
+      session = KnoxSession.login(url, username, password);
+      sessions.put(mountPoint, session);
+    } catch (URISyntaxException e) {
+      e.printStackTrace();
+    }
+    return session;
+  }
+
+  private String collectClearInput(String prompt) {
+    Console c = System.console();
+    if (c == null) {
+      System.err.println("No console.");
+      System.exit(1);
+    }
+
+    String value = c.readLine(prompt);
+
+    return value;
+  }
+
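+  // Converts a mounted path such as /retail/user/lmccay into the WebHDFS target path by stripping the leading mount-point segment.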
+  private String determineTargetPath(String path, String mountPoint) {
+    String directory = null;
+    if (path.startsWith("/")) {
+      directory = stripMountPoint(path, mountPoint);
+    }
+    return directory;
+  }
+
+  private String stripMountPoint(String path, String mountPoint) {
+    String newPath = path.replace("/" + mountPoint, "");
+    return newPath;
+  }
+
+  private String determineMountPoint(String path) {
+    String mountPoint = null;
+    if (path.startsWith("/")) {
+      // if the user-supplied path starts at the root,
+      // check for a mount point based on the first element of the path
+      String[] pathElements = path.split("/");
+      mountPoint = pathElements[1];
+    }
+    return mountPoint;
+  }
+
+  private KnoxShellTable buildTableFromListStatus(String directory, List<HashMap<String, String>> list) {
+    Calendar cal = Calendar.getInstance(TimeZone.getDefault(), Locale.getDefault());
+    KnoxShellTable table = new KnoxShellTable();
+    table.title(directory);
+    table.header("permission")
+      .header("owner")
+      .header("group")
+      .header("length")
+      .header("modtime")
+      .header("name");
+
+    for (Map<String, String> map : list) {
+      cal.setTimeInMillis(Long.parseLong(map.get("modificationTime")));
+      table.row()
+        .value(map.get("permission"))
+        .value(map.get("owner"))
+        .value(map.get("group"))
+        .value(map.get("length"))
+        .value(cal.getTime())
+        .value(map.get("pathSuffix"));
+    }
+
+    return table;
+  }
+
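+  // Returns the mount-point map cached in the shell variables, loading any persisted mount points on first use.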
+  @SuppressWarnings("unchecked")
+  protected Map<String, String> getMountPoints() {
+    Map<String, String> mounts = (Map<String, String>) getVariables().get(KNOXMOUNTPOINTS);
+    if (mounts == null) {
+      try {
+        mounts = KnoxSession.loadMountPoints();
+      } catch (IOException e) {
+        e.printStackTrace();
+      }
+      if (mounts != null) {
+        getVariables().put(KNOXMOUNTPOINTS, mounts);
+      }
+      else {
+        mounts = new HashMap<>();
+      }
+    }
+    return mounts;
+  }
+
+  public static void main(String[] args) {
+    WebHDFSCommand cmd = new WebHDFSCommand(new Groovysh());
+    List<String> args2 = new ArrayList<>();
+    cmd.execute(args2);
 
 Review comment:
   `args` is not passed. You may want to change it to:
   ```
   cmd.execute(new ArrayList<>(Arrays.asList(args)));
   ```
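   For context: `execute` mutates the argument list (it calls `args.add("ls")` when the list is empty), and `Arrays.asList` on its own returns a fixed-size list that rejects `add`, so the extra `new ArrayList<>(...)` copy matters. A minimal standalone illustration (not part of the patch; class name is arbitrary):
   ```
   import java.util.ArrayList;
   import java.util.Arrays;
   import java.util.List;

   public class ArgsListDemo {
     public static void main(String[] cliArgs) {
       // Arrays.asList returns a fixed-size view backed by the array
       List<String> fixedSize = Arrays.asList(cliArgs);
       // Copying into a new ArrayList yields an independent, growable list
       List<String> mutable = new ArrayList<>(Arrays.asList(cliArgs));
       mutable.add("ls");       // fine
       try {
         fixedSize.add("ls");   // throws UnsupportedOperationException
       } catch (UnsupportedOperationException expected) {
         System.out.println("Arrays.asList is fixed-size: " + expected);
       }
     }
   }
   ```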
 
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


Issue Time Tracking
-------------------

    Worklog Id:     (was: 407760)
    Time Spent: 1h 10m  (was: 1h)

> KnoxShell Custom Command for WEBHDFS Use
> ----------------------------------------
>
>                 Key: KNOX-2240
>                 URL: https://issues.apache.org/jira/browse/KNOX-2240
>             Project: Apache Knox
>          Issue Type: Improvement
>          Components: KnoxShell
>            Reporter: Larry McCay
>            Assignee: Larry McCay
>            Priority: Major
>             Fix For: 1.4.0
>
>          Time Spent: 1h 10m
>  Remaining Estimate: 0h
>
> I'd like to extend the KnoxShell environment to have a Custom Command for 
> interacting with Hadoop FileSystems that support the WebHDFS REST API. We 
> will wrap the use of the KnoxShell HDFS classes for POSIX-like commands in 
> the shell environment.
> In order to interact with multiple webhdfs filesystems we introduce the 
> notion of mounting a webhdfs service from a Knox topology URL. For instance, 
> see the following mount command to mount such a service to a mount point:
> {code}
> :fs mount https://knoxhost.example.com/topology/cdp-proxy-api retail
> {code}
> The above mounts a Hadoop filesystem to a mount point called "retail".
> Given this mounted filesystem we are able to interact with it through the 
> mountpoint as the root.
> {code}
> :fs ls /retail/user/lmccay
> {code}
> The above will list the contents of my home directory from the mounted 
> filesystem as well as return the listing as a KnoxShellTable that can be 
> sorted, selected, filtered, etc., as any other KnoxShellTable.
> Additional commands:
> {code}
> :fs mkdir /retail/user/lmccay/tmp 755
> :fs cat /retail/user/lmccay/tmp/README
> :fs put /home/lmccay/README /retail/user/lmccay/tmp/README
> :fs get /retail/user/lmccay/tmp/README /home/lmccay/README
> :fs rename /retail/user/lmccay/tmp/README /retail/user/lmccay/tmp/README2
> :fs rm /retail/user/lmccay/tmp/README
> {code}
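> As a rough sketch (not part of the patch above) of how a mounted path is resolved: the first path segment names the mount point, and the remainder becomes the WebHDFS target path, mirroring the determineMountPoint/stripMountPoint helpers.
> {code}
> // hypothetical standalone illustration of the path resolution
> String path = "/retail/user/lmccay";
> String mountPoint = path.split("/")[1];                 // "retail" -> selects the mounted topology URL
> String targetPath = path.replace("/" + mountPoint, ""); // "/user/lmccay" -> path sent to WebHDFS
> {code}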



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
