Modified: hive/branches/cbo/ql/src/test/results/clientpositive/vectorization_16.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/vectorization_16.q.out?rev=1616379&r1=1616378&r2=1616379&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/vectorization_16.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/vectorization_16.q.out Thu Aug  7 00:21:45 2014
@@ -1,3 +1,99 @@
+PREHOOK: query: EXPLAIN 
+SELECT   cstring1,
+         cdouble,
+         ctimestamp1,
+         (cdouble - 9763215.5639),
+         (-((cdouble - 9763215.5639))),
+         COUNT(cdouble),
+         STDDEV_SAMP(cdouble),
+         (-(STDDEV_SAMP(cdouble))),
+         (STDDEV_SAMP(cdouble) * COUNT(cdouble)),
+         MIN(cdouble),
+         (9763215.5639 / cdouble),
+         (COUNT(cdouble) / -1.389),
+         STDDEV_SAMP(cdouble)
+FROM     alltypesorc
+WHERE    ((cstring2 LIKE '%b%')
+          AND ((cdouble >= -1.389)
+              OR (cstring1 < 'a')))
+GROUP BY cstring1, cdouble, ctimestamp1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN 
+SELECT   cstring1,
+         cdouble,
+         ctimestamp1,
+         (cdouble - 9763215.5639),
+         (-((cdouble - 9763215.5639))),
+         COUNT(cdouble),
+         STDDEV_SAMP(cdouble),
+         (-(STDDEV_SAMP(cdouble))),
+         (STDDEV_SAMP(cdouble) * COUNT(cdouble)),
+         MIN(cdouble),
+         (9763215.5639 / cdouble),
+         (COUNT(cdouble) / -1.389),
+         STDDEV_SAMP(cdouble)
+FROM     alltypesorc
+WHERE    ((cstring2 LIKE '%b%')
+          AND ((cdouble >= -1.389)
+              OR (cstring1 < 'a')))
+GROUP BY cstring1, cdouble, ctimestamp1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: alltypesorc
+            Statistics: Num rows: 1521 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((cstring2 like '%b%') and ((cdouble >= (- 1.389)) or (cstring1 < 'a'))) (type: boolean)
+              Statistics: Num rows: 506 Data size: 125497 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: cstring1 (type: string), cdouble (type: double), ctimestamp1 (type: timestamp)
+                outputColumnNames: cstring1, cdouble, ctimestamp1
+                Statistics: Num rows: 506 Data size: 125497 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count(cdouble), stddev_samp(cdouble), min(cdouble)
+                  keys: cstring1 (type: string), cdouble (type: double), ctimestamp1 (type: timestamp)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                  Statistics: Num rows: 506 Data size: 125497 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp)
+                    sort order: +++
+                    Map-reduce partition columns: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp)
+                    Statistics: Num rows: 506 Data size: 125497 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col3 (type: bigint), _col4 (type: struct<count:bigint,sum:double,variance:double>), _col5 (type: double)
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0), stddev_samp(VALUE._col1), min(VALUE._col2)
+          keys: KEY._col0 (type: string), KEY._col1 (type: double), KEY._col2 (type: timestamp)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          Statistics: Num rows: 253 Data size: 62748 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp), (_col1 - 9763215.5639) (type: double), (- (_col1 - 9763215.5639)) (type: double), _col3 (type: bigint), _col4 (type: double), (- _col4) (type: double), (_col4 * _col3) (type: double), _col5 (type: double), (9763215.5639 / _col1) (type: double), (_col3 / (- 1.389)) (type: double), _col4 (type: double)
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12
+            Statistics: Num rows: 253 Data size: 62748 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 253 Data size: 62748 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: SELECT   cstring1,
          cdouble,
          ctimestamp1,

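The plan above hands STDDEV_SAMP from the map-side Group By (mode: hash) to the reducer as a partial state of type struct<count:bigint,sum:double,variance:double>, which the reduce-side Group By (mode: mergepartial) combines per group key. The Java sketch below illustrates how two such partial states can be merged, assuming the "variance" field holds the running sum of squared deviations; it uses the standard parallel-merge formula (Chan et al.) and is an illustration only, not Hive's actual stddev_samp UDAF code.

// Illustration only -- not Hive's stddev_samp UDAF implementation.
public class PartialVariance {
  long count;   // corresponds to struct field count:bigint
  double sum;   // corresponds to struct field sum:double
  double m2;    // assumed meaning of struct field variance:double (sum of squared deviations)

  // Merge another partial state into this one (Chan et al. parallel formula).
  void merge(PartialVariance other) {
    if (other.count == 0) {
      return;
    }
    if (count == 0) {
      count = other.count; sum = other.sum; m2 = other.m2;
      return;
    }
    double delta = other.sum / other.count - sum / count;
    long total = count + other.count;
    m2 += other.m2 + delta * delta * ((double) count * other.count) / total;
    sum += other.sum;
    count = total;
  }

  // STDDEV_SAMP of the merged data; SQL would return NULL for fewer than two rows.
  double stddevSamp() {
    return count > 1 ? Math.sqrt(m2 / (count - 1)) : Double.NaN;
  }

  public static void main(String[] args) {
    // Partial state for {1,2,3}: count=3, sum=6, m2=2; for {4,5}: count=2, sum=9, m2=0.5.
    PartialVariance a = new PartialVariance(); a.count = 3; a.sum = 6.0; a.m2 = 2.0;
    PartialVariance b = new PartialVariance(); b.count = 2; b.sum = 9.0; b.m2 = 0.5;
    a.merge(b);
    System.out.println(a.stddevSamp()); // sqrt(10 / 4) = 1.5811..., the STDDEV_SAMP of 1..5
  }
}
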
Modified: hive/branches/cbo/ql/src/test/results/clientpositive/vectorization_9.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/vectorization_9.q.out?rev=1616379&r1=1616378&r2=1616379&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/vectorization_9.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/vectorization_9.q.out Thu Aug  7 00:21:45 2014
@@ -1,3 +1,99 @@
+PREHOOK: query: EXPLAIN 
+SELECT   cstring1,
+         cdouble,
+         ctimestamp1,
+         (cdouble - 9763215.5639),
+         (-((cdouble - 9763215.5639))),
+         COUNT(cdouble),
+         STDDEV_SAMP(cdouble),
+         (-(STDDEV_SAMP(cdouble))),
+         (STDDEV_SAMP(cdouble) * COUNT(cdouble)),
+         MIN(cdouble),
+         (9763215.5639 / cdouble),
+         (COUNT(cdouble) / -1.389),
+         STDDEV_SAMP(cdouble)
+FROM     alltypesorc
+WHERE    ((cstring2 LIKE '%b%')
+          AND ((cdouble >= -1.389)
+              OR (cstring1 < 'a')))
+GROUP BY cstring1, cdouble, ctimestamp1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN 
+SELECT   cstring1,
+         cdouble,
+         ctimestamp1,
+         (cdouble - 9763215.5639),
+         (-((cdouble - 9763215.5639))),
+         COUNT(cdouble),
+         STDDEV_SAMP(cdouble),
+         (-(STDDEV_SAMP(cdouble))),
+         (STDDEV_SAMP(cdouble) * COUNT(cdouble)),
+         MIN(cdouble),
+         (9763215.5639 / cdouble),
+         (COUNT(cdouble) / -1.389),
+         STDDEV_SAMP(cdouble)
+FROM     alltypesorc
+WHERE    ((cstring2 LIKE '%b%')
+          AND ((cdouble >= -1.389)
+              OR (cstring1 < 'a')))
+GROUP BY cstring1, cdouble, ctimestamp1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: alltypesorc
+            Statistics: Num rows: 1521 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((cstring2 like '%b%') and ((cdouble >= (- 1.389)) or (cstring1 < 'a'))) (type: boolean)
+              Statistics: Num rows: 506 Data size: 125497 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: cstring1 (type: string), cdouble (type: double), ctimestamp1 (type: timestamp)
+                outputColumnNames: cstring1, cdouble, ctimestamp1
+                Statistics: Num rows: 506 Data size: 125497 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count(cdouble), stddev_samp(cdouble), min(cdouble)
+                  keys: cstring1 (type: string), cdouble (type: double), ctimestamp1 (type: timestamp)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                  Statistics: Num rows: 506 Data size: 125497 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp)
+                    sort order: +++
+                    Map-reduce partition columns: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp)
+                    Statistics: Num rows: 506 Data size: 125497 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col3 (type: bigint), _col4 (type: struct<count:bigint,sum:double,variance:double>), _col5 (type: double)
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0), stddev_samp(VALUE._col1), min(VALUE._col2)
+          keys: KEY._col0 (type: string), KEY._col1 (type: double), KEY._col2 (type: timestamp)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+          Statistics: Num rows: 253 Data size: 62748 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col0 (type: string), _col1 (type: double), _col2 (type: timestamp), (_col1 - 9763215.5639) (type: double), (- (_col1 - 9763215.5639)) (type: double), _col3 (type: bigint), _col4 (type: double), (- _col4) (type: double), (_col4 * _col3) (type: double), _col5 (type: double), (9763215.5639 / _col1) (type: double), (_col3 / (- 1.389)) (type: double), _col4 (type: double)
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12
+            Statistics: Num rows: 253 Data size: 62748 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 253 Data size: 62748 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: SELECT   cfloat,
          cstring1,
          cint,

Modified: hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java?rev=1616379&r1=1616378&r2=1616379&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java Thu Aug  7 00:21:45 2014
@@ -594,7 +594,7 @@ public abstract class ThriftCLIService e
 
   private boolean isKerberosAuthMode() {
     return cliService.getHiveConf().getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION)
-        .equals(HiveAuthFactory.AuthTypes.KERBEROS.toString());
+        .equalsIgnoreCase(HiveAuthFactory.AuthTypes.KERBEROS.toString());
   }
 
 }
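
The change above from equals() to equalsIgnoreCase() makes Kerberos detection tolerant of the letter case used for hive.server2.authentication. A minimal sketch of the difference, assuming HiveAuthFactory.AuthTypes.KERBEROS.toString() yields "KERBEROS" and using a made-up configuration value:

public class KerberosAuthModeCheck {
  public static void main(String[] args) {
    String configured = "kerberos"; // hypothetical value of hive.server2.authentication
    System.out.println(configured.equals("KERBEROS"));           // false: the old check misses it
    System.out.println(configured.equalsIgnoreCase("KERBEROS")); // true: the new check matches it
  }
}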

Modified: hive/branches/cbo/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1616379&r1=1616378&r2=1616379&view=diff
==============================================================================
--- hive/branches/cbo/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/branches/cbo/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Thu Aug  7 00:21:45 2014
@@ -25,6 +25,7 @@ import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.security.AccessControlException;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
@@ -43,6 +44,7 @@ import javax.security.auth.login.LoginEx
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.DefaultFileAccess;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -52,6 +54,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.ProxyFileSystem;
 import org.apache.hadoop.fs.Trash;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
 import org.apache.hadoop.io.LongWritable;
@@ -880,4 +883,10 @@ public class Hadoop20Shims implements Ha
     LOG.debug(ArrayUtils.toString(command));
     shell.run(command);
   }
+
+  @Override
+  public void checkFileAccess(FileSystem fs, FileStatus stat, FsAction action)
+      throws IOException, AccessControlException, Exception {
+    DefaultFileAccess.checkFileAccess(fs, stat, action);
+  }
 }
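
Hadoop 0.20 has no FileSystem.access() API, so this shim always delegates to DefaultFileAccess.checkFileAccess(). The sketch below shows the general shape of such a client-side check -- compare the requested FsAction against the owner, group, or other bits of the file's permission -- under assumed semantics; it is not the actual DefaultFileAccess implementation.

import java.security.AccessControlException;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

// Illustration only -- a simplified owner/group/other check in the spirit of DefaultFileAccess.
public class SimpleFileAccessCheck {

  public static void check(FsPermission perm, String owner, String group,
      String user, List<String> userGroups, FsAction action) throws AccessControlException {
    FsAction granted;
    if (user.equals(owner)) {
      granted = perm.getUserAction();
    } else if (userGroups.contains(group)) {
      granted = perm.getGroupAction();
    } else {
      granted = perm.getOtherAction();
    }
    if (!granted.implies(action)) {
      throw new AccessControlException(
          "Permission denied: user=" + user + ", access=" + action + ", perm=" + perm);
    }
  }

  public static void main(String[] args) {
    FsPermission perm = new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE, FsAction.NONE); // rwxr-x---
    check(perm, "hive", "hadoop", "hive", Arrays.asList("hadoop"), FsAction.WRITE); // owner: allowed
    check(perm, "hive", "hadoop", "bob", Arrays.asList("hadoop"), FsAction.READ);   // group member: allowed
    check(perm, "hive", "hadoop", "bob", Arrays.asList("hadoop"), FsAction.WRITE);  // throws AccessControlException
  }
}

A real check would take the owner, group, and permission bits from the FileStatus and the user and group names from the current UserGroupInformation.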

Modified: hive/branches/cbo/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1616379&r1=1616378&r2=1616379&view=diff
==============================================================================
--- hive/branches/cbo/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/branches/cbo/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Thu Aug  7 00:21:45 2014
@@ -19,10 +19,15 @@ package org.apache.hadoop.hive.shims;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
 import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URI;
+import java.security.AccessControlException;
+import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
@@ -32,6 +37,7 @@ import org.apache.commons.lang.ArrayUtil
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.DefaultFileAccess;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -651,6 +657,24 @@ public class Hadoop23Shims extends Hadoo
         }
       };
     }
+
+    /**
+     * Proxy file system also needs to override the access() method behavior.
+     * Cannot add Override annotation since FileSystem.access() may not exist in
+     * the version of hadoop used to build Hive.
+     */
+    public void access(Path path, FsAction action) throws AccessControlException,
+        FileNotFoundException, IOException, Exception {
+      Path underlyingFsPath = swizzleParamPath(path);
+      FileStatus underlyingFsStatus = fs.getFileStatus(underlyingFsPath);
+      if (accessMethod != null) {
+          accessMethod.invoke(fs, underlyingFsPath, action);
+      } else {
+        // If the FS has no access() method, we can try DefaultFileAccess ..
+        UserGroupInformation ugi = getUGIForConf(getConf());
+        DefaultFileAccess.checkFileAccess(fs, underlyingFsStatus, action);
+      }
+    }
   }
 
   @Override
@@ -709,4 +733,50 @@ public class Hadoop23Shims extends Hadoo
   public void getMergedCredentials(JobConf jobConf) throws IOException {
     jobConf.getCredentials().mergeAll(UserGroupInformation.getCurrentUser().getCredentials());
   }
+
+  protected static final Method accessMethod;
+
+  static {
+    Method m = null;
+    try {
+      m = FileSystem.class.getMethod("access", Path.class, FsAction.class);
+    } catch (NoSuchMethodException err) {
+      // This version of Hadoop does not support FileSystem.access().
+    }
+    accessMethod = m;
+  }
+
+  @Override
+  public void checkFileAccess(FileSystem fs, FileStatus stat, FsAction action)
+      throws IOException, AccessControlException, Exception {
+    try {
+      if (accessMethod == null) {
+        // Have to rely on Hive implementation of filesystem permission checks.
+        DefaultFileAccess.checkFileAccess(fs, stat, action);
+      } else {
+        accessMethod.invoke(fs, stat.getPath(), action);
+      }
+    } catch (Exception err) {
+      throw wrapAccessException(err);
+    }
+  }
+
+  /**
+   * If there is an AccessControlException buried somewhere in the chain of failures, wrap the original
+   * exception in an AccessControlException. Otherwise just return the original exception.
+   */
+  private static Exception wrapAccessException(Exception err) {
+    final int maxDepth = 20;
+    Throwable curErr = err;
+    for (int idx = 0; curErr != null && idx < maxDepth; ++idx) {
+      if (curErr instanceof org.apache.hadoop.security.AccessControlException
+          || curErr instanceof org.apache.hadoop.fs.permission.AccessControlException) {
+        Exception newErr = new AccessControlException(curErr.getMessage());
+        newErr.initCause(err);
+        return newErr;
+      }
+      curErr = curErr.getCause();
+    }
+    return err;
+  }
 }
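
Because FileSystem.access() is looked up and called reflectively here, an AccessControlException thrown by the underlying filesystem reaches the shim wrapped inside an InvocationTargetException, which is why wrapAccessException() walks the cause chain instead of inspecting only the top-level exception. A standalone illustration of that wrapping (the Thrower class is hypothetical, standing in for FileSystem.access()):

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

import org.apache.hadoop.security.AccessControlException;

public class ReflectionWrappingDemo {

  // Hypothetical stand-in that fails the way FileSystem.access() does on a permission error.
  public static class Thrower {
    public void access() throws AccessControlException {
      throw new AccessControlException("Permission denied");
    }
  }

  public static void main(String[] args) throws Exception {
    Method accessMethod = Thrower.class.getMethod("access");
    try {
      accessMethod.invoke(new Thrower());
    } catch (InvocationTargetException wrapped) {
      // The real failure is only reachable through getCause(), so callers must unwrap it.
      System.out.println(wrapped.getCause());
    }
  }
}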

Modified: hive/branches/cbo/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1616379&r1=1616378&r2=1616379&view=diff
==============================================================================
--- hive/branches/cbo/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/branches/cbo/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Thu Aug  7 00:21:45 2014
@@ -24,6 +24,7 @@ import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.security.AccessControlException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -32,14 +33,19 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
+import javax.security.auth.login.LoginException;
+
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.DefaultFileAccess;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
 import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
 import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
@@ -663,4 +669,10 @@ public abstract class HadoopShimsSecure 
     Collections.addAll(dedup, locations);
     return dedup.toArray(new String[dedup.size()]);
   }
+
+  @Override
+  public void checkFileAccess(FileSystem fs, FileStatus stat, FsAction action)
+      throws IOException, AccessControlException, Exception {
+    DefaultFileAccess.checkFileAccess(fs, stat, action);
+  }
 }

Modified: hive/branches/cbo/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1616379&r1=1616378&r2=1616379&view=diff
==============================================================================
--- hive/branches/cbo/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/branches/cbo/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java Thu Aug  7 00:21:45 2014
@@ -25,6 +25,7 @@ import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.nio.ByteBuffer;
+import java.security.AccessControlException;
 import java.security.PrivilegedExceptionAction;
 import java.util.Comparator;
 import java.util.List;
@@ -42,6 +43,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.InputSplit;
@@ -669,4 +671,18 @@ public interface HadoopShims {
 
   public void getMergedCredentials(JobConf jobConf) throws IOException;
 
+  /**
+   * Check if the configured UGI has access to the path for the given file system action.
+   * Method will return successfully if action is permitted. AccessControlException will
+   * be thrown if the user does not have access to perform the action. Other exceptions may
+   * be thrown for non-access related errors.
+   * @param fs
+   * @param status
+   * @param action
+   * @throws IOException
+   * @throws AccessControlException
+   * @throws Exception
+   */
+  public void checkFileAccess(FileSystem fs, FileStatus status, FsAction action)
+      throws IOException, AccessControlException, Exception;
 }
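
For callers, the new interface method provides one entry point for permission checks regardless of the Hadoop version behind the shim. A hedged usage sketch -- the path is made up, and ShimLoader.getHadoopShims() is the usual way Hive code obtains the active shim:

import java.security.AccessControlException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hive.shims.ShimLoader;

public class CheckAccessExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path path = new Path("/tmp/example");          // hypothetical path
    FileSystem fs = path.getFileSystem(conf);
    FileStatus stat = fs.getFileStatus(path);
    try {
      ShimLoader.getHadoopShims().checkFileAccess(fs, stat, FsAction.WRITE);
      System.out.println("write access granted on " + path);
    } catch (AccessControlException ace) {
      // java.security.AccessControlException, as declared by the interface; depending on the
      // shim and Hadoop version, a denial may instead surface as a Hadoop AccessControlException.
      System.out.println("write access denied on " + path + ": " + ace.getMessage());
    }
  }
}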

