Author: acmurthy
Date: Wed Jan  2 01:19:11 2008
New Revision: 608050

URL: http://svn.apache.org/viewvc?rev=608050&view=rev
Log:
HADOOP-2344. Enhance the utility for executing shell commands to read the
stdout/stderr streams while waiting for the command to finish (to free up
the buffers). Also, this patch throws away stderr of the DF utility.
@deprecated
  org.apache.hadoop.fs.ShellCommand for org.apache.hadoop.util.Shell
  org.apache.hadoop.util.ShellUtil for 
org.apache.hadoop.util.Shell.ShellCommandExecutor

Added:
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Shell.java
Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DF.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DU.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ShellCommand.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
    
lucene/hadoop/trunk/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ShellUtil.java
    
lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalDirAllocator.java
    
lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalFileSystemPermission.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=608050&r1=608049&r2=608050&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Wed Jan  2 01:19:11 2008
@@ -306,6 +306,14 @@
     HADOOP-2492. Additional debugging in the rpc server to better 
     diagnose ConcurrentModificationException. (dhruba)
 
+    HADOOP-2344. Enhance the utility for executing shell commands to read the
+    stdout/stderr streams while waiting for the command to finish (to free up
+    the buffers). Also, this patch throws away stderr of the DF utility.
+    @deprecated 
+      org.apache.hadoop.fs.ShellCommand for org.apache.hadoop.util.Shell
+      org.apache.hadoop.util.ShellUtil for 
+        org.apache.hadoop.util.Shell.ShellCommandExecutor
+
 Release 0.15.2 - 2008-01-02
 
   BUG FIXES

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DF.java
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DF.java?rev=608050&r1=608049&r2=608050&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DF.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DF.java Wed Jan  2 
01:19:11 2008
@@ -24,10 +24,11 @@
 import java.util.StringTokenizer;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.Shell;
 
 /** Filesystem disk space usage statistics.  Uses the unix 'df' program.
  * Tested on Linux, FreeBSD, Cygwin. */
-public class DF extends ShellCommand {
+public class DF extends Shell {
   public static final long DF_INTERVAL_DEFAULT = 3 * 1000; // default DF 
refresh interval 
   
   private String  dirPath;
@@ -95,7 +96,9 @@
   }
 
   protected String[] getExecString() {
-    return new String[] {"df","-k", dirPath};
+    // ignoring the error since the exit code is enough
+    return new String[] {"bash","-c","exec 'df' '-k' '" + dirPath 
+                         + "' 2>/dev/null"};
   }
   
   protected void parseExecResult(BufferedReader lines) throws IOException {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DU.java
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DU.java?rev=608050&r1=608049&r2=608050&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DU.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DU.java Wed Jan  2 
01:19:11 2008
@@ -23,9 +23,10 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.dfs.FSConstants;
+import org.apache.hadoop.util.Shell;
 
 /** Filesystem disk space usage statistics.  Uses the unix 'du' program*/
-public class DU extends ShellCommand {
+public class DU extends Shell {
   private String  dirPath;
 
   private long used;

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java?rev=608050&r1=608049&r2=608050&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java Wed Jan  2 
01:19:11 2008
@@ -25,6 +25,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Shell;
 
 /**
  * A collection of file-processing util methods
@@ -257,7 +258,7 @@
   /**
    * This class is only used on windows to invoke the cygpath command.
    */
-  private static class CygPathCommand extends ShellCommand {
+  private static class CygPathCommand extends Shell {
     String[] command;
     String result;
     CygPathCommand(String path) throws IOException {

Modified: 
lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=608050&r1=608049&r2=608050&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java 
(original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java 
Wed Jan  2 01:19:11 2008
@@ -28,6 +28,7 @@
 import org.apache.hadoop.fs.permission.*;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Shell;
 
 /****************************************************************
  * Implement the FileSystem API for the raw local filesystem.
@@ -399,7 +400,7 @@
       try {
         StringTokenizer t = new StringTokenizer(
             execCommand(new File(getPath().toUri()), 
-                        ShellCommand.getGET_PERMISSION_COMMAND()));
+                        Shell.getGET_PERMISSION_COMMAND()));
         //expected format
         //-rw-------    1 username groupname ...
         String permission = t.nextToken();
@@ -449,7 +450,7 @@
     //[OWNER][:[GROUP]]
     String s = (username == null? "": username)
              + (groupname == null? "": ":" + groupname);
-    execCommand(pathToFile(p), ShellCommand.SET_OWNER_COMMAND, s);
+    execCommand(pathToFile(p), Shell.SET_OWNER_COMMAND, s);
   }
 
   /**
@@ -458,7 +459,7 @@
   @Override
   public void setPermission(Path p, FsPermission permission
       ) throws IOException {
-    execCommand(pathToFile(p), ShellCommand.SET_PERMISSION_COMMAND,
+    execCommand(pathToFile(p), Shell.SET_PERMISSION_COMMAND,
         String.format("%04o", permission.toShort()));
   }
 
@@ -466,7 +467,7 @@
     String[] args = new String[cmd.length + 1];
     System.arraycopy(cmd, 0, args, 0, cmd.length);
     args[cmd.length] = f.getCanonicalPath();
-    String output = ShellCommand.execCommand(args);
+    String output = Shell.execCommand(args);
     return output;
   }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ShellCommand.java
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ShellCommand.java?rev=608050&r1=608049&r2=608050&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ShellCommand.java 
(original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ShellCommand.java Wed Jan 
 2 01:19:11 2008
@@ -17,111 +17,9 @@
  */
 package org.apache.hadoop.fs;
 
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.BufferedReader;
+import org.apache.hadoop.util.Shell;
 
-/** A base class for running a unix command like du or df*/
-abstract public class ShellCommand {
-  /** a Unix command to get the current user's name */
-  public final static String USER_NAME_COMMAND = "whoami";
-  /** a Unix command to get the current user's groups list */
-  public static String[] getGROUPS_COMMAND() {
-    return new String[]{"bash", "-c", "groups"};
-  }
-  /** a Unix command to set permission */
-  public static final String SET_PERMISSION_COMMAND = "chmod";
-  /** a Unix command to set owner */
-  public static final String SET_OWNER_COMMAND = "chown";
-  /** Return a Unix command to get permission information. */
-  public static String[] getGET_PERMISSION_COMMAND() {
-    return new String[]{"ls", "-ld"};
-  }
-
-  private long    interval;   // refresh interval in msec
-  private long    lastTime;   // last time the command was performed
-  
-  ShellCommand() {
-    this(0L);
-  }
-  
-  ShellCommand( long interval ) {
-    this.interval = interval;
-    this.lastTime = (interval<0) ? 0 : -interval;
-  }
-  
-  /** check to see if a command needs to be execuated */
-  protected void run() throws IOException {
-    if (lastTime + interval > System.currentTimeMillis())
-      return;
-    runCommand();
-  }
-
-  /** Run a command */
-  private void runCommand() throws IOException { 
-    Process process;
-    process = Runtime.getRuntime().exec(getExecString());
-
-    try {
-      if (process.waitFor() != 0) {
-        throw new IOException
-          (new BufferedReader(new InputStreamReader(process.getErrorStream()))
-           .readLine());
-      }
-      parseExecResult(new BufferedReader(
-          new InputStreamReader(process.getInputStream())));
-    } catch (InterruptedException e) {
-      throw new IOException(e.toString());
-    } finally {
-      process.destroy();
-      lastTime = System.currentTimeMillis();
-    }
-  }
-
-  /** return an array comtaining the command name & its parameters */ 
-  protected abstract String[] getExecString();
-  
-  /** Parse the execution result */
-  protected abstract void parseExecResult(BufferedReader lines)
-  throws IOException;
-
-  /// A simple implementation of Command
-  private static class SimpleCommandExecutor extends ShellCommand {
-    
-    private String[] command;
-    private StringBuffer reply;
-    
-    SimpleCommandExecutor(String[] execString) {
-      command = execString;
-    }
-
-    @Override
-    protected String[] getExecString() {
-      return command;
-    }
-
-    @Override
-    protected void parseExecResult(BufferedReader lines) throws IOException {
-      reply = new StringBuffer();
-      char[] buf = new char[512];
-      int nRead;
-      while ( (nRead = lines.read(buf, 0, buf.length)) > 0 ) {
-        reply.append(buf, 0, nRead);
-      }
-    }
-    
-    String getReply() {
-      return (reply == null) ? "" : reply.toString();
-    }
-  }
-  
-  /** 
-   * Static method to execute a command. Covers most of the simple cases 
-   * without requiring the user to implement Command interface.
-   */
-  public static String execCommand(String ... cmd) throws IOException {
-    SimpleCommandExecutor exec = new SimpleCommandExecutor(cmd);
-    exec.run();
-    return exec.getReply();
-  }
-}
+/** A base class for running a unix command like du or df.
+ * @deprecated Use {@link Shell} instead.
+ */
+abstract public class ShellCommand extends Shell {}

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java?rev=608050&r1=608049&r2=608050&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java 
(original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java Wed 
Jan  2 01:19:11 2008
@@ -20,6 +20,7 @@
 import org.apache.commons.logging.*;
 
 import org.apache.hadoop.fs.*;
+import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.filecache.*;
 import org.apache.hadoop.util.*;
@@ -38,7 +39,7 @@
     LogFactory.getLog("org.apache.hadoop.mapred.TaskRunner");
 
   volatile boolean killed = false;
-  private Process process;
+  private ShellCommandExecutor shexec; // shell terminal for running the task
   private Task t;
   private TaskTracker tracker;
 
@@ -440,57 +441,27 @@
   }
 
   /**
-   * Append the contents of the input stream to the output file. Both streams 
-   * are closed upon exit.
-   * @param in the stream to read
-   * @param outName the filename to append the data to
-   * @throws IOException if something goes wrong
-   */
-  private void copyStream(InputStream in, File outName) throws IOException {
-    try {
-      OutputStream out = new FileOutputStream(outName, true);
-      try {
-        byte[] buffer = new byte[1024];
-        int len = in.read(buffer);
-        while (len > 0) {
-          out.write(buffer, 0, len);
-          len = in.read(buffer);
-        }
-      } finally {
-        out.close();
-      }
-    } finally {
-      in.close();
-    }
-  }
-
-  /**
    * Run the child process
    */
   private void runChild(List<String> args, File dir,
                         String taskid) throws IOException {
 
     try {
-      ShellUtil shexec = new ShellUtil(args, dir, System.getenv());
+      shexec = new ShellCommandExecutor(args.toArray(new String[0]), dir);
       shexec.execute();
-      process = shexec.getProcess();
+    } catch (IOException ioe) {
+      // do nothing
+      // error and output are appropriately redirected
+    } finally { // handle the exit code
       int exit_code = shexec.getExitCode();
      
       if (!killed && exit_code != 0) {
         if (exit_code == 65) {
           tracker.getTaskTrackerMetrics().taskFailedPing();
         }
-        copyStream(process.getInputStream(), 
-                   TaskLog.getTaskLogFile(taskid, TaskLog.LogName.STDOUT));
-        copyStream(process.getErrorStream(), 
-                   TaskLog.getTaskLogFile(taskid, TaskLog.LogName.STDERR));
         throw new IOException("Task process exit with nonzero status of " +
                               exit_code + ".");
       }
-    } catch (InterruptedException e) {
-      throw new IOException(e.toString());
-    } finally {
-      kill();      
     }
   }
 
@@ -498,8 +469,11 @@
    * Kill the child process
    */
   public void kill() {
-    if (process != null) {
-      process.destroy();
+    if (shexec != null) {
+      Process process = shexec.getProcess();
+      if (process != null) {
+        process.destroy();
+      }
     }
     killed = true;
   }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java?rev=608050&r1=608049&r2=608050&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java 
(original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java Wed 
Jan  2 01:19:11 2008
@@ -58,6 +58,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
@@ -75,7 +76,6 @@
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.RunJar;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.ShellUtil;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.log4j.LogManager;
 
@@ -1637,22 +1637,13 @@
      * @throws IOException
      */
     public void runScript(List<String> args, File dir) throws IOException {
-      Process process = null;
-      try {
-        ShellUtil shexec = new ShellUtil(args, dir, System.getenv());
-        shexec.execute();
-        process = shexec.getProcess();
-        int exit_code = shexec.getExitCode();
-        if (exit_code != 0) {
-          throw new IOException("Task debug script exit with nonzero "
-                                +"status of " + exit_code + ".");
-        }
-      } catch (InterruptedException e) {
-          throw new IOException(e.toString());
-      } finally {
-        if (process != null) {
-          process.destroy();
-        }
+      ShellCommandExecutor shexec = 
+              new ShellCommandExecutor(args.toArray(new String[0]), dir);
+      shexec.execute();
+      int exitCode = shexec.getExitCode();
+      if (exitCode != 0) {
+        throw new IOException("Task debug script exit with nonzero status of " 
+                              + exitCode + ".");
       }
     }
 

Modified: 
lucene/hadoop/trunk/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java?rev=608050&r1=608049&r2=608050&view=diff
==============================================================================
--- 
lucene/hadoop/trunk/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java
 (original)
+++ 
lucene/hadoop/trunk/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java
 Wed Jan  2 01:19:11 2008
@@ -29,7 +29,7 @@
 import javax.security.auth.login.LoginException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.ShellCommand;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableUtils;
 
@@ -289,10 +289,10 @@
    */
   static String getUnixUserName() throws IOException {
     String[] result = executeShellCommand(
-        new String[]{ShellCommand.USER_NAME_COMMAND});
+        new String[]{Shell.USER_NAME_COMMAND});
     if (result.length!=1) {
       throw new IOException("Expect one token as the result of " + 
-          ShellCommand.USER_NAME_COMMAND + ": " + toString(result));
+          Shell.USER_NAME_COMMAND + ": " + toString(result));
     }
     return result[0];
   }
@@ -303,13 +303,13 @@
    * @throws IOException if encounter any error when running the command
    */
   private static String[] getUnixGroups() throws IOException {
-    return executeShellCommand(ShellCommand.getGROUPS_COMMAND());
+    return executeShellCommand(Shell.getGROUPS_COMMAND());
   }
   
   /* Execute a command and return the result as an array of Strings */
   private static String[] executeShellCommand(String[] command)
   throws IOException {
-    String groups = ShellCommand.execCommand(command);
+    String groups = Shell.execCommand(command);
     StringTokenizer tokenizer = new StringTokenizer(groups);
     int numOfTokens = tokenizer.countTokens();
     String[] tokens = new String[numOfTokens];

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Shell.java
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Shell.java?rev=608050&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Shell.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Shell.java Wed Jan  2 
01:19:11 2008
@@ -0,0 +1,268 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+import java.util.Map;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.BufferedReader;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+/** 
+ * A base class for running a Unix command.
+ * 
+ * <code>Shell</code> can be used to run unix commands like <code>du</code> or
+ * <code>df</code>. It also offers facilities to gate commands by 
+ * time-intervals.
+ */
+abstract public class Shell {
+  
+  public static final Log LOG = LogFactory.getLog(Shell.class);
+  
+  /** a Unix command to get the current user's name */
+  public final static String USER_NAME_COMMAND = "whoami";
+  /** a Unix command to get the current user's groups list */
+  public static String[] getGROUPS_COMMAND() {
+    return new String[]{"bash", "-c", "groups"};
+  }
+  /** a Unix command to set permission */
+  public static final String SET_PERMISSION_COMMAND = "chmod";
+  /** a Unix command to set owner */
+  public static final String SET_OWNER_COMMAND = "chown";
+  /** Return a Unix command to get permission information. */
+  public static String[] getGET_PERMISSION_COMMAND() {
+    return new String[]{"ls", "-ld"};
+  }
+
+  private long    interval;   // refresh interval in msec
+  private long    lastTime;   // last time the command was performed
+  private Map<String, String> environment; // env for the command execution
+  private File dir;
+  private Process process; // sub process used to execute the command
+  private int exitCode;
+  
+  public Shell() {
+    this(0L);
+  }
+  
+  /**
+   * @param interval the minimum duration to wait before re-executing the 
+   * command.
+   */
+  public Shell( long interval ) {
+    this.interval = interval;
+    this.lastTime = (interval<0) ? 0 : -interval;
+  }
+  
+  /** set the environment for the command 
+   * @param env Mapping of environment variables
+   */
+  protected void setEnvironment(Map<String, String> env) {
+    this.environment = env;
+  }
+
+  /** set the working directory 
+   * @param dir The directory where the command would be executed
+   */
+  protected void setWorkingDirectory(File dir) {
+    this.dir = dir;
+  }
+
+  /** check to see if a command needs to be executed and execute if needed */
+  protected void run() throws IOException {
+    if (lastTime + interval > System.currentTimeMillis())
+      return;
+    exitCode = 0; // reset for next run
+    runCommand();
+  }
+
+  /** Run a command */
+  private void runCommand() throws IOException { 
+    ProcessBuilder builder = new ProcessBuilder(getExecString());
+    boolean completed = false;
+    
+    if (environment != null) {
+      builder.environment().putAll(this.environment);
+    }
+    if (dir != null) {
+      builder.directory(this.dir);
+    }
+    
+    process = builder.start();
+    final BufferedReader errReader = 
+            new BufferedReader(new InputStreamReader(process
+                                                     .getErrorStream()));
+    BufferedReader inReader = 
+            new BufferedReader(new InputStreamReader(process
+                                                     .getInputStream()));
+    final StringBuffer errMsg = new StringBuffer();
+    
+    // read error and input streams as this would free up the buffers
+    // free the error stream buffer
+    Thread errThread = new Thread() {
+      @Override
+      public void run() {
+        try {
+          String line = errReader.readLine();
+          while((line != null) && !isInterrupted()) {
+            errMsg.append(line);
+            errMsg.append(System.getProperty("line.separator"));
+            line = errReader.readLine();
+          }
+        } catch(IOException ioe) {
+          LOG.warn("Error reading the error stream", ioe);
+        }
+      }
+    };
+    try {
+      errThread.start();
+    } catch (IllegalStateException ise) { }
+    try {
+      parseExecResult(inReader); // parse the output
+      // clear the input stream buffer
+      String line = inReader.readLine();
+      while(line != null) { 
+        line = inReader.readLine();
+      }
+      // wait for the process to finish and check the exit code
+      exitCode = process.waitFor();
+      if (exitCode != 0) {
+        if (errMsg.length() == 0) {
+          errMsg.append("Command exit with status code " + exitCode);
+        }
+        throw new IOException(errMsg.toString());
+      }
+      completed = true;
+    } catch (InterruptedException ie) {
+      throw new IOException(ie.toString());
+    } finally {
+      // close the input stream
+      try {
+        inReader.close();
+      } catch (IOException ioe) {
+        LOG.warn("Error while closing the input stream", ioe);
+      }
+      if (completed) {
+        try {
+          // make sure that the error thread exits
+          errThread.join();
+        } catch (InterruptedException ie) {
+          LOG.warn("Interrupted while reading the error stream", ie);
+        }
+      } else {
+        errThread.interrupt();
+      }
+      try {
+        errReader.close();
+      } catch (IOException ioe) {
+        LOG.warn("Error while closing the error stream", ioe);
+      }
+      process.destroy();
+      lastTime = System.currentTimeMillis();
+    }
+  }
+
+  /** return an array containing the command name & its parameters */ 
+  protected abstract String[] getExecString();
+  
+  /** Parse the execution result */
+  protected abstract void parseExecResult(BufferedReader lines)
+  throws IOException;
+
+  /** get the current sub-process executing the given command 
+   * @return process executing the command
+   */
+  public Process getProcess() {
+    return process;
+  }
+
+  /** get the exit code 
+   * @return the exit code of the process
+   */
+  public int getExitCode() {
+    return exitCode;
+  }
+
+  /**
+   * A simple shell command executor.
+   * 
+   * <code>ShellCommandExecutor</code>should be used in cases where the output 
+   * of the command needs no explicit parsing and where the command, working 
+   * directory and the environment remains unchanged. The output of the 
command 
+   * is stored as-is and is expected to be small.
+   */
+  public static class ShellCommandExecutor extends Shell {
+    
+    private String[] command;
+    private StringBuffer output;
+    
+    public ShellCommandExecutor(String[] execString) {
+      command = execString.clone();
+    }
+
+    public ShellCommandExecutor(String[] execString, File dir) {
+      this(execString);
+      this.setWorkingDirectory(dir);
+    }
+
+    public ShellCommandExecutor(String[] execString, File dir, 
+                                 Map<String, String> env) {
+      this(execString, dir);
+      this.setEnvironment(env);
+    }
+    
+    /** Execute the shell command. */
+    public void execute() throws IOException {
+      this.run();    
+    }
+
+    protected String[] getExecString() {
+      return command;
+    }
+
+    protected void parseExecResult(BufferedReader lines) throws IOException {
+      output = new StringBuffer();
+      char[] buf = new char[512];
+      int nRead;
+      while ( (nRead = lines.read(buf, 0, buf.length)) > 0 ) {
+        output.append(buf, 0, nRead);
+      }
+    }
+    
+    /** Get the output of the shell command.*/
+    public String getOutput() {
+      return (output == null) ? "" : output.toString();
+    }
+  }
+  
+  /** 
+   * Static method to execute a shell command. 
+   * Covers most of the simple cases without requiring the user to implement  
+   * the <code>Shell</code> interface.
+   * @param cmd shell command to execute.
+   * @return the output of the executed command.
+   */
+  public static String execCommand(String ... cmd) throws IOException {
+    ShellCommandExecutor exec = new ShellCommandExecutor(cmd);
+    exec.execute();
+    return exec.getOutput();
+  }
+}

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ShellUtil.java
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ShellUtil.java?rev=608050&r1=608049&r2=608050&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ShellUtil.java 
(original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ShellUtil.java Wed Jan  
2 01:19:11 2008
@@ -5,28 +5,24 @@
 import java.io.File;
 import java.io.IOException;
 
+import org.apache.hadoop.util.Shell.ShellCommandExecutor;
+
 /**
  *  Class to execute a shell.
- *
+ *  @deprecated Use {@link ShellCommandExecutor} instead.
  */
-
 public class ShellUtil {
-    private List<String> exec_cmd;
-    private File working_dir;
-    private Process process;
-    private int exit_code;
-    private Map<String, String> environment;
-
+  
+  ShellCommandExecutor shexec; // shell to execute a command
+  
     /**
      * @param args list containing command and command line arguments
      * @param dir Current working directory
-     * @throws IOException 
-     * @throws InterruptedException
-        */
+     * @param env Environment for the command
+     */
     public ShellUtil (List<String> args, File dir, Map<String, String> env) {
-      exec_cmd = args;
-      working_dir = dir;
-      environment  = env;
+      shexec = new ShellCommandExecutor(args.toArray(new String[0]), dir, 
+                                         env);
     }
        
     /**
@@ -34,28 +30,21 @@
      * @throws IOException
      * @throws InterruptedException
      */
-    public void execute() throws IOException, InterruptedException {
-      // start the process and wait for it to execute 
-      ProcessBuilder builder = new ProcessBuilder(exec_cmd);
-      builder.directory(working_dir);
-      if (environment != null) {
-        builder.environment().putAll(environment);
-      }
-      process = builder.start();
-      exit_code = process.waitFor();
-      
+    public void execute() throws IOException {
+      // start the process and wait for it to execute
+      shexec.execute();
     }
     /**
      * @return process
      */
     public Process getProcess() {
-      return process;
+      return shexec.getProcess();
     }
 
     /**
      * @return exit-code of the process
      */
     public int getExitCode() {
-      return exit_code;
+      return shexec.getExitCode();
    }
 }

Modified: 
lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java?rev=608050&r1=608049&r2=608050&view=diff
==============================================================================
--- 
lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java 
(original)
+++ 
lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java 
Wed Jan  2 01:19:11 2008
@@ -27,7 +27,7 @@
 import java.util.zip.CRC32;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.ShellCommand;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.fs.FSInputStream;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.io.UTF8;
@@ -72,7 +72,7 @@
                  FileUtil.makeShellPath(dataDir) + "' ; tar -xf -)";
     LOG.info("Unpacking the tar file. Cmd : " + cmd);
     String[] shellCmd = { "bash", "-c", cmd };
-    ShellCommand.execCommand(shellCmd);
+    Shell.execCommand(shellCmd);
     
     //Now read the reference info
     

Modified: 
lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalDirAllocator.java
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalDirAllocator.java?rev=608050&r1=608049&r2=608050&view=diff
==============================================================================
--- 
lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalDirAllocator.java 
(original)
+++ 
lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalDirAllocator.java 
Wed Jan  2 01:19:11 2008
@@ -21,6 +21,7 @@
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.Shell;
 
 import junit.framework.TestCase;
 
@@ -90,7 +91,7 @@
       validateTempDirCreation(1);
       validateTempDirCreation(1);
     } finally {
-      ShellCommand.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
+      Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
       rmBufferDirs();
     }
   }
@@ -108,7 +109,7 @@
       validateTempDirCreation(2);
       validateTempDirCreation(2);
     } finally {
-      ShellCommand.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
+      Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
       rmBufferDirs();
     }
   }

Modified: 
lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalFileSystemPermission.java
URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalFileSystemPermission.java?rev=608050&r1=608049&r2=608050&view=diff
==============================================================================
--- 
lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalFileSystemPermission.java
 (original)
+++ 
lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestLocalFileSystemPermission.java
 Wed Jan  2 01:19:11 2008
@@ -20,6 +20,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.*;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Shell;
 
 import java.io.*;
 import java.util.*;
@@ -138,7 +139,7 @@
 
   static List<String> getGroups() throws IOException {
     List<String> a = new ArrayList<String>();
-    String s = ShellCommand.execCommand(ShellCommand.getGROUPS_COMMAND());
+    String s = Shell.execCommand(Shell.getGROUPS_COMMAND());
     for(StringTokenizer t = new StringTokenizer(s); t.hasMoreTokens(); ) {
       a.add(t.nextToken());
     }


Reply via email to