This is an automated email from the ASF dual-hosted git repository.

mwalch pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/accumulo.git


The following commit(s) were added to refs/heads/master by this push:
     new c265ea5  Remove 'accumulo-util hadoop-jar' command (#872)
c265ea5 is described below

commit c265ea5b16171032419b164e809f5478f70bbba8
Author: Mike Walch <mwa...@apache.org>
AuthorDate: Sat Jan 5 17:54:17 2019 -0500

    Remove 'accumulo-util hadoop-jar' command (#872)
    
    * Command doesn't work well now that Accumulo does
      not match Hadoop's dependencies
    * Users should create a shaded jar instead and submit it
      using Hadoop's 'yarn jar' command (see the sketch below)
    * Command was used by StandaloneClusterControl, but only
      by a unit test
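
    A minimal sketch of the replacement workflow, assuming the shaded (fat)
    jar is built by your own project (e.g. with the Maven Shade plugin); the
    jar name, main class, and tool arguments below are hypothetical
    placeholders, not part of this commit:

        # Build a shaded jar that bundles the tool and its dependencies
        mvn package

        # Submit it with Hadoop's own launcher instead of 'accumulo-util hadoop-jar'
        yarn jar target/my-tool-shaded.jar my.tool.class.Name -u user -p password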
---
 assemble/bin/accumulo-util                         | 69 ----------------------
 .../standalone/StandaloneClusterControl.java       | 37 +-----------
 .../standalone/StandaloneClusterControlTest.java   | 49 ---------------
 3 files changed, 2 insertions(+), 153 deletions(-)

diff --git a/assemble/bin/accumulo-util b/assemble/bin/accumulo-util
index d243cd4..13eb802 100755
--- a/assemble/bin/accumulo-util
+++ b/assemble/bin/accumulo-util
@@ -22,7 +22,6 @@ Usage: accumulo-util <command> (<argument> ...)
 Commands:
   build-native        Builds Accumulo native libraries
   dump-zoo            Dumps data in ZooKeeper
-  hadoop-jar          Runs 'hadoop jar' command with Accumulo jars
   gen-monitor-cert    Generates Accumulo monitor certficate
   load-jars-hdfs      Loads Accumulo jars in lib/ to HDFS for VFS classloader
   
@@ -182,71 +181,6 @@ function load_jars_hdfs() {
   "$HADOOP" fs -rm "$SYSTEM_CONTEXT_HDFS_DIR/slf4j*.jar"  > /dev/null
 }
 
-function hadoop_jar() {
-  if [[ -x "$HADOOP_HOME/bin/hadoop" ]]; then
-    HADOOP="$HADOOP_HOME/bin/hadoop"
-  else
-    HADOOP=$(which hadoop)
-  fi
-  if [[ ! -x "$HADOOP" ]]; then
-    echo "Could not find 'hadoop' command. Please set hadoop on your PATH or 
set HADOOP_HOME"
-    exit 1
-  fi
-  if [[ -z "$ZOOKEEPER_HOME" ]]; then
-     echo "ZOOKEEPER_HOME must be set!"
-     exit 1
-  fi
-
-  ZOOKEEPER_CMD="ls -1 $ZOOKEEPER_HOME/zookeeper-[0-9]*[^csn].jar "
-  if [[ $(eval "$ZOOKEEPER_CMD" | wc -l) -ne 1 ]] ; then
-     echo "Not exactly one zookeeper jar in $ZOOKEEPER_HOME"
-     exit 1
-  fi
-  ZOOKEEPER_LIB=$(eval "$ZOOKEEPER_CMD")
-
-  CORE_LIB="${lib}/accumulo-core.jar"
-  THRIFT_LIB="${lib}/libthrift.jar"
-  JCOMMANDER_LIB="${lib}/jcommander.jar"
-  COMMONS_VFS_LIB="${lib}/commons-vfs2.jar"
-  GUAVA_LIB="${lib}/guava.jar"
-  HTRACE_LIB="${lib}/htrace-core.jar"
-
-  USERJARS=" "
-  for arg in "$@"; do
-      if [[ "$arg" != "-libjars" ]] && [[ -z "$TOOLJAR" ]]; then
-        TOOLJAR="$arg"
-        shift
-     elif [[ "$arg" != "-libjars" ]] && [[ -z "$CLASSNAME" ]]; then
-        CLASSNAME="$arg"
-        shift
-     elif [[ -z "$USERJARS" ]]; then
-        USERJARS=$(echo "$arg" | tr "," " ")
-        shift
-     elif [[ "$arg" = "-libjars" ]]; then
-        USERJARS=""
-        shift
-     else
-        break
-     fi
-  done
-
-  LIB_JARS="$THRIFT_LIB,$CORE_LIB,$ZOOKEEPER_LIB,$JCOMMANDER_LIB,$COMMONS_VFS_LIB,$GUAVA_LIB,$HTRACE_LIB"
-  H_JARS="$THRIFT_LIB:$CORE_LIB:$ZOOKEEPER_LIB:$JCOMMANDER_LIB:$COMMONS_VFS_LIB:$GUAVA_LIB:$HTRACE_LIB"
-
-  for jar in $USERJARS; do
-     LIB_JARS="$LIB_JARS,$jar"
-     H_JARS="$H_JARS:$jar"
-  done
-  export HADOOP_CLASSPATH="$H_JARS:$HADOOP_CLASSPATH"
-
-  if [[ -z "$CLASSNAME" || -z "$TOOLJAR" ]]; then
-     echo "Usage: accumulo-util hadoop-jar path/to/myTool.jar 
my.tool.class.Name [-libjars my1.jar,my2.jar]" 1>&2
-     exit 1
-  fi
-
-  exec "$HADOOP" jar "$TOOLJAR" "$CLASSNAME" -libjars "$LIB_JARS" "$@"
-}
-
 function main() {
   SOURCE="${BASH_SOURCE[0]}"
   while [ -h "${SOURCE}" ]; do
@@ -266,9 +200,6 @@ function main() {
     dump-zoo)
       "$bin"/accumulo org.apache.accumulo.server.util.DumpZookeeper "${@:2}"
       ;;
-    hadoop-jar)
-      hadoop_jar "${@:2}"
-      ;;
     gen-monitor-cert)
       gen_monitor_cert
       ;;
diff --git a/minicluster/src/main/java/org/apache/accumulo/cluster/standalone/StandaloneClusterControl.java b/minicluster/src/main/java/org/apache/accumulo/cluster/standalone/StandaloneClusterControl.java
index c374e86..13c33f7 100644
--- a/minicluster/src/main/java/org/apache/accumulo/cluster/standalone/StandaloneClusterControl.java
+++ b/minicluster/src/main/java/org/apache/accumulo/cluster/standalone/StandaloneClusterControl.java
@@ -53,7 +53,7 @@ public class StandaloneClusterControl implements ClusterControl {
   private static final Logger log = LoggerFactory.getLogger(StandaloneClusterControl.class);
 
   private static final String ACCUMULO_SERVICE_SCRIPT = "accumulo-service",
-      ACCUMULO_SCRIPT = "accumulo", ACCUMULO_UTIL_SCRIPT = "accumulo-util";
+      ACCUMULO_SCRIPT = "accumulo";
   private static final String MASTER_HOSTS_FILE = "masters", GC_HOSTS_FILE = "gc",
       TSERVER_HOSTS_FILE = "tservers", TRACER_HOSTS_FILE = "tracers",
       MONITOR_HOSTS_FILE = "monitor";
@@ -65,7 +65,7 @@ public class StandaloneClusterControl implements ClusterControl {
   private String serverCmdPrefix;
   protected RemoteShellOptions options;
 
-  protected String accumuloServicePath, accumuloPath, accumuloUtilPath;
+  protected String accumuloServicePath, accumuloPath;
 
   @SuppressFBWarnings(value = "PATH_TRAVERSAL_IN",
       justification = "code runs in same security context as user who provided 
input file name")
@@ -81,11 +81,6 @@ public class StandaloneClusterControl implements ClusterControl {
     File bin = new File(accumuloHome, "bin");
     this.accumuloServicePath = new File(bin, ACCUMULO_SERVICE_SCRIPT).getAbsolutePath();
     this.accumuloPath = new File(bin, ACCUMULO_SCRIPT).getAbsolutePath();
-    this.accumuloUtilPath = new File(bin, ACCUMULO_UTIL_SCRIPT).getAbsolutePath();
-  }
-
-  String getAccumuloUtilPath() {
-    return this.accumuloUtilPath;
   }
 
   protected Entry<Integer,String> exec(String hostname, String[] command) throws IOException {
@@ -133,34 +128,6 @@ public class StandaloneClusterControl implements ClusterControl {
     return msg.replaceAll("[\r\n]", "");
   }
 
-  public Entry<Integer,String> execMapreduceWithStdout(Class<?> clz, String[] args)
-      throws IOException {
-    String host = "localhost";
-    List<String> cmd = new ArrayList<>();
-    cmd.add(getAccumuloUtilPath());
-    cmd.add("hadoop-jar");
-    cmd.add(getJarFromClass(clz));
-    cmd.add(clz.getName());
-    for (String arg : args) {
-      cmd.add("'" + arg + "'");
-    }
-    log.info("Running: '{}' on {}", StringUtils.join(cmd, " "), host);
-    return exec(host, cmd.toArray(new String[cmd.size()]));
-  }
-
-  String getJarFromClass(Class<?> clz) {
-    CodeSource source = clz.getProtectionDomain().getCodeSource();
-    if (source == null) {
-      throw new RuntimeException("Could not get CodeSource for class");
-    }
-    URL jarUrl = source.getLocation();
-    String jar = jarUrl.getPath();
-    if (!jar.endsWith(".jar")) {
-      throw new RuntimeException("Need to have a jar to run mapreduce: " + 
jar);
-    }
-    return jar;
-  }
-
   @Override
   public void adminStopAll() throws IOException {
     String master = getHosts(MASTER_HOSTS_FILE).get(0);
diff --git a/minicluster/src/test/java/org/apache/accumulo/cluster/standalone/StandaloneClusterControlTest.java b/minicluster/src/test/java/org/apache/accumulo/cluster/standalone/StandaloneClusterControlTest.java
index 1c31f89..5e23b83 100644
--- a/minicluster/src/test/java/org/apache/accumulo/cluster/standalone/StandaloneClusterControlTest.java
+++ b/minicluster/src/test/java/org/apache/accumulo/cluster/standalone/StandaloneClusterControlTest.java
@@ -16,19 +16,10 @@
  */
 package org.apache.accumulo.cluster.standalone;
 
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.aryEq;
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.verify;
 import static org.junit.Assert.assertEquals;
 
-import org.easymock.EasyMock;
 import org.junit.Test;
 
-import com.google.common.collect.Maps;
-
 public class StandaloneClusterControlTest {
 
   @Test
@@ -46,44 +37,4 @@ public class StandaloneClusterControlTest {
     assertEquals(accumuloHome + "/bin/accumulo", control.accumuloPath);
     assertEquals(accumuloHome + "/bin/accumulo-service", 
control.accumuloServicePath);
   }
-
-  @Test
-  public void mapreduceLaunchesLocally() throws Exception {
-    final String accumuloUtilPath = "/usr/lib/accumulo/bin/accumulo-util";
-    final String jar = "/home/user/my_project.jar";
-    final Class<?> clz = Object.class;
-    final String myClass = clz.getName();
-    StandaloneClusterControl control = EasyMock.createMockBuilder(StandaloneClusterControl.class)
-        .addMockedMethod("exec", String.class, String[].class)
-        .addMockedMethod("getAccumuloUtilPath").addMockedMethod("getJarFromClass", Class.class)
-        .createMock();
-
-    final String[] toolArgs = {"-u", "user", "-p", "password"};
-    final String[] expectedCommands = new String[4 + toolArgs.length];
-
-    int i = 0;
-    expectedCommands[i++] = accumuloUtilPath;
-    expectedCommands[i++] = "hadoop-jar";
-    expectedCommands[i++] = jar;
-    expectedCommands[i++] = myClass;
-    for (int j = 0; j < toolArgs.length; j++) {
-      expectedCommands[i + j] = quote(toolArgs[j]);
-    }
-
-    expect(control.getAccumuloUtilPath()).andReturn(accumuloUtilPath);
-    expect(control.getJarFromClass(anyObject(Class.class))).andReturn(jar);
-    expect(control.exec(eq("localhost"), aryEq(expectedCommands)))
-        .andReturn(Maps.immutableEntry(0, ""));
-
-    replay(control);
-
-    // Give a fake Class -- we aren't verifying the actual class passed in
-    control.execMapreduceWithStdout(clz, toolArgs);
-
-    verify(control);
-  }
-
-  private String quote(String word) {
-    return "'" + word + "'";
-  }
 }
