Modified: hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/serializer/TestWritableSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/serializer/TestWritableSerialization.java?rev=897029&r1=897028&r2=897029&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/serializer/TestWritableSerialization.java (original)
+++ hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/serializer/TestWritableSerialization.java Thu Jan  7 22:04:37 2010
@@ -22,10 +22,22 @@
 import static org.apache.hadoop.io.TestGenericWritable.CONF_TEST_VALUE;
 import junit.framework.TestCase;
 
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.TestGenericWritable.Foo;
+import org.apache.hadoop.io.TestGenericWritable.Bar;
 import org.apache.hadoop.io.TestGenericWritable.Baz;
 import org.apache.hadoop.io.TestGenericWritable.FooGenericWritable;
+import org.apache.hadoop.io.serializer.DeserializerBase;
+import org.apache.hadoop.io.serializer.SerializationBase;
+import org.apache.hadoop.io.serializer.SerializerBase;
+import org.apache.hadoop.util.GenericsUtil;
 
 public class TestWritableSerialization extends TestCase {
 
@@ -53,4 +65,118 @@
     assertEquals(baz, result);
     assertNotNull(result.getConf());
   }
+
+  @SuppressWarnings("unchecked")
+  public void testIgnoreMisconfiguredMetadata() throws IOException {
+    // If SERIALIZATION_KEY is set, still need class name.
+
+    Configuration conf = new Configuration();
+    Map<String, String> metadata = new HashMap<String, String>();
+    metadata.put(SerializationBase.SERIALIZATION_KEY,
+        WritableSerialization.class.getName());
+    SerializationFactory factory = new SerializationFactory(conf);
+    SerializationBase serialization = factory.getSerialization(metadata);
+    assertNull("Got serializer without any class info", serialization);
+
+    metadata.put(SerializationBase.CLASS_KEY,
+        Text.class.getName());
+    serialization = factory.getSerialization(metadata);
+    assertNotNull("Didn't get serialization!", serialization);
+    assertTrue("Wrong serialization class",
+        serialization instanceof WritableSerialization);
+  }
+
+  @SuppressWarnings("unchecked")
+  public void testReuseSerializer() throws IOException {
+    // Test that we can write multiple objects of the same type
+    // through the same serializer.
+
+    DataOutputBuffer out = new DataOutputBuffer();
+    SerializationFactory factory = new SerializationFactory(
+        new Configuration());
+
+    // Create a few Foo objects and serialize them.
+    Foo foo = new Foo();
+    Foo foo2 = new Foo();
+    Map<String, String> metadata = SerializationBase.getMetadataFromClass(
+        GenericsUtil.getClass(foo));
+
+    SerializerBase fooSerializer = factory.getSerializer(metadata);
+    fooSerializer.open(out);
+    fooSerializer.serialize(foo);
+    fooSerializer.serialize(foo2);
+    fooSerializer.close();
+
+    out.reset();
+
+    // Create a new serializer for Bar objects
+    Bar bar = new Bar();
+    Baz baz = new Baz(); // Baz inherits from Bar.
+    metadata = SerializationBase.getMetadataFromClass(
+        GenericsUtil.getClass(bar));
+    // Check that we can serialize Bar objects.
+    SerializerBase barSerializer = factory.getSerializer(metadata);
+    barSerializer.open(out);
+    barSerializer.serialize(bar); // this should work.
+    try {
+      // This should not work. We should not allow subtype serialization.
+      barSerializer.serialize(baz);
+      fail("Expected IOException serializing baz via bar serializer.");
+    } catch (IOException ioe) {
+      // Expected.
+    }
+
+    try {
+      // This should not work. Disallow unrelated type serialization.
+      barSerializer.serialize(foo);
+      fail("Expected IOException serializing foo via bar serializer.");
+    } catch (IOException ioe) {
+      // Expected.
+    }
+
+    barSerializer.close();
+    out.reset();
+  }
+
+
+  // Test the SerializationBase.checkSerializationKey() method.
+  class DummySerializationBase extends SerializationBase<Object> {
+    public boolean accept(Map<String, String> metadata) {
+      return checkSerializationKey(metadata);
+    }
+
+    public SerializerBase<Object> getSerializer(Map<String, String> metadata) {
+      return null;
+    }
+
+    public DeserializerBase<Object> getDeserializer(Map<String, String> metadata) {
+      return null;
+    }
+
+    public RawComparator<Object> getRawComparator(Map<String, String> metadata) {
+      return null;
+    }
+  }
+
+  public void testSerializationKeyCheck() {
+    DummySerializationBase dummy = new DummySerializationBase();
+    Map<String, String> metadata = new HashMap<String, String>();
+
+    assertTrue("Didn't accept empty metadata", dummy.accept(metadata));
+
+    metadata.put(SerializationBase.SERIALIZATION_KEY,
+        DummySerializationBase.class.getName());
+    assertTrue("Didn't accept valid metadata", dummy.accept(metadata));
+
+    metadata.put(SerializationBase.SERIALIZATION_KEY, "foo");
+    assertFalse("Accepted invalid metadata", dummy.accept(metadata));
+
+    try {
+      dummy.accept((Map<String, String>) null);
+      // Shouldn't get here!
+      fail("Somehow didn't actually test the method we expected");
+    } catch (NullPointerException npe) {
+      // expected this.
+    }
+  }
 }

Modified: hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java?rev=897029&r1=897028&r2=897029&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java (original)
+++ hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java Thu Jan  7 22:04:37 2010
@@ -26,12 +26,38 @@
 import org.apache.avro.util.Utf8;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.serializer.SerializationBase;
+import org.apache.hadoop.io.serializer.SerializationFactory;
 import org.apache.hadoop.io.serializer.SerializationTestUtil;
 
 public class TestAvroSerialization extends TestCase {
 
   private static final Configuration conf = new Configuration();
 
+  @SuppressWarnings("unchecked")
+  public void testIgnoreMisconfiguredMetadata() {
+    // If SERIALIZATION_KEY is set, still need class name.
+
+    Configuration conf = new Configuration();
+    Map<String, String> metadata = new HashMap<String, String>();
+    SerializationFactory factory = new SerializationFactory(conf);
+    SerializationBase serialization = null;
+
+    metadata.put(SerializationBase.SERIALIZATION_KEY,
+        AvroGenericSerialization.class.getName());
+    serialization = factory.getSerialization(metadata);
+    assertNull("Got serializer without any class info", serialization);
+
+    metadata.put(SerializationBase.SERIALIZATION_KEY,
+        AvroReflectSerialization.class.getName());
+    serialization = factory.getSerialization(metadata);
+    assertNull("Got serializer without any class info", serialization);
+
+    metadata.put(SerializationBase.SERIALIZATION_KEY,
+        AvroSpecificSerialization.class.getName());
+    serialization = factory.getSerialization(metadata);
+    assertNull("Got serializer without any class info", serialization);
+  }
+
   public void testSpecific() throws Exception {
     AvroRecord before = new AvroRecord();
     before.intField = 5;

Modified: hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java?rev=897029&r1=897028&r2=897029&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java (original)
+++ hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java Thu Jan  7 22:04:37 2010
@@ -61,14 +61,16 @@
 
   public void testCalls() throws Exception {
     Configuration conf = new Configuration();
-    Server server = AvroRpc.getServer(new TestImpl(), ADDRESS, 0, conf);
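+    // Bind AvroRpcEngine to AvroTestProtocol so that the generic RPC.getServer
+    // and RPC.getProxy calls below take over from the old AvroRpc entry points.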
+    RPC.setProtocolEngine(conf, AvroTestProtocol.class, AvroRpcEngine.class);
+    Server server = RPC.getServer(AvroTestProtocol.class,
+                                  new TestImpl(), ADDRESS, 0, conf);
     AvroTestProtocol proxy = null;
     try {
       server.start();
 
       InetSocketAddress addr = NetUtils.getConnectAddress(server);
       proxy =
-        (AvroTestProtocol)AvroRpc.getProxy(AvroTestProtocol.class, addr, conf);
+        (AvroTestProtocol)RPC.getProxy(AvroTestProtocol.class, 0, addr, conf);
       
       proxy.ping();
 

Modified: hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/TestIPCServerResponder.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/TestIPCServerResponder.java?rev=897029&r1=897028&r2=897029&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/TestIPCServerResponder.java (original)
+++ hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/TestIPCServerResponder.java Thu Jan  7 22:04:37 2010
@@ -114,6 +114,12 @@
     }
   }
 
+  public void testResponseBuffer() throws Exception {
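+    // Shrink the responder's buffer limits to 1 byte so that even a small
+    // response presumably exercises the buffer growth and reset paths.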
+    Server.INITIAL_RESP_BUF_SIZE = 1;
+    Server.MAX_RESP_BUF_SIZE = 1;
+    testServerResponder(1, true, 1, 1, 5);
+  }
+
   public void testServerResponder() throws Exception {
     testServerResponder(10, true, 1, 10, 200);
   }

Modified: hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/TestRPC.java?rev=897029&r1=897028&r2=897029&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/TestRPC.java (original)
+++ hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/ipc/TestRPC.java Thu Jan  7 22:04:37 2010
@@ -190,7 +190,8 @@
   public void testSlowRpc() throws Exception {
     System.out.println("Testing Slow RPC");
     // create a server with two handlers
-    Server server = RPC.getServer(new TestImpl(), ADDRESS, 0, 2, false, conf);
+    Server server = RPC.getServer(TestProtocol.class,
+                                  new TestImpl(), ADDRESS, 0, 2, false, conf);
     TestProtocol proxy = null;
     
     try {
@@ -230,9 +231,9 @@
     }
   }
 
-
   public void testCalls(Configuration conf) throws Exception {
-    Server server = RPC.getServer(new TestImpl(), ADDRESS, 0, conf);
+    Server server = RPC.getServer(TestProtocol.class,
+                                  new TestImpl(), ADDRESS, 0, conf);
     TestProtocol proxy = null;
     try {
     server.start();
@@ -306,8 +307,8 @@
     assertTrue(Arrays.equals(strings, new String[]{"a","b"}));
 
     Method ping = TestProtocol.class.getMethod("ping", new Class[] {});
-    Object[] voids = (Object[])RPC.call(ping, new Object[][]{{},{}},
-                                        new InetSocketAddress[] {addr, addr}, conf);
+    Object[] voids = RPC.call(ping, new Object[][]{{},{}},
+                              new InetSocketAddress[] {addr, addr}, conf);
     assertEquals(voids, null);
     } finally {
       server.stop();
@@ -339,7 +340,8 @@
   private void doRPCs(Configuration conf, boolean expectFailure) throws Exception {
     SecurityUtil.setPolicy(new ConfiguredPolicy(conf, new TestPolicyProvider()));
     
-    Server server = RPC.getServer(new TestImpl(), ADDRESS, 0, 5, true, conf);
+    Server server = RPC.getServer(TestProtocol.class,
+                                  new TestImpl(), ADDRESS, 0, 5, true, conf);
 
     TestProtocol proxy = null;
 

Modified: hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/security/TestUnixUserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/security/TestUnixUserGroupInformation.java?rev=897029&r1=897028&r2=897029&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/security/TestUnixUserGroupInformation.java (original)
+++ hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/security/TestUnixUserGroupInformation.java Thu Jan  7 22:04:37 2010
@@ -18,11 +18,21 @@
 
 package org.apache.hadoop.security;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.TestWritable;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.security.Principal;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.security.auth.Subject;
 
 import junit.framework.TestCase;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.TestWritable;
+import org.apache.hadoop.util.Shell;
+
 /** Unit tests for UnixUserGroupInformation */
 public class TestUnixUserGroupInformation extends TestCase {
   final private static String USER_NAME = "user1";
@@ -31,6 +41,7 @@
   final private static String GROUP3_NAME = "group3";
   final private static String[] GROUP_NAMES = 
                       new String[]{GROUP1_NAME, GROUP2_NAME, GROUP3_NAME};
+  final private static Configuration conf = new Configuration();
   
   /** Test login method */
   public void testLogin() throws Exception {
@@ -63,7 +74,6 @@
     testConstructorFailures(null, GROUP_NAMES);
     testConstructorFailures("", GROUP_NAMES);
     testConstructorFailures(USER_NAME, null);
-    testConstructorFailures(USER_NAME, new String[0]);
     testConstructorFailures(USER_NAME, new String[]{null});
     testConstructorFailures(USER_NAME, new String[]{""});
     testConstructorFailures(USER_NAME, new String[]{GROUP1_NAME, null});
@@ -98,6 +108,69 @@
   public void testWritable() throws Exception {
     UnixUserGroupInformation ugi = new UnixUserGroupInformation(
         USER_NAME, GROUP_NAMES);
-    TestWritable.testWritable(ugi, new Configuration());
+    TestWritable.testWritable(ugi, conf);
+  }
+  
+  /**
+   * given user name - get all the groups.
+   */
+  public void testGetServerSideGroups() throws IOException {
+    
+    // get the user name
+    Process pp = Runtime.getRuntime().exec("whoami");
+    BufferedReader br = new BufferedReader(new InputStreamReader(pp.getInputStream()));
+    String userName = br.readLine().trim();
+    // get the groups
+    pp = Runtime.getRuntime().exec("id -Gn");
+    br = new BufferedReader(new InputStreamReader(pp.getInputStream()));
+    String line = br.readLine();
+    System.out.println(userName + ":" + line);
+   
+    List<String> groups = new ArrayList<String> ();    
+    for(String s: line.split("[\\s]")) {
+      groups.add(s);
+    }
+    
+    boolean ugiIsIn = false;
+    
+    // get groups on the server side
+    int numberOfGroups = 0;
+    Subject subject = SecurityUtil.getSubject(conf, userName);
+    System.out.println("for user="+userName+" prinicipals are:");
+    for(Principal p : subject.getPrincipals()) {
+      if(p instanceof User) {
+        System.out.println("USER: " + p.getName());
+        assertTrue("user name is not the same as in the Subject: " + 
p.getName(),
+            userName.equals(p.getName()));
+      }
+      if(p instanceof Group) {
+        numberOfGroups++;
+        System.out.println("GROUP: " + p.getName());
+        assertTrue("Subject contains invalid group " + p.getName(), 
+            groups.contains(p.getName()));
+      }
+      if(p instanceof UserGroupInformation) {
+        System.out.println("UGI: " + p.getName());
+        ugiIsIn = true;
+      }
+    }
+    assertTrue("UGI object is not in the Subject", ugiIsIn);
+    assertEquals("number of groups in subject doesn't match actual # groups", 
+        numberOfGroups, groups.size());
+    
+    // negative test - get Subject for non-existing user
+    // should return empty groups
+    subject = SecurityUtil.getSubject(conf, "fakeUser");
+    for(Principal p : subject.getPrincipals()) {
+      if(p instanceof User) {
+        System.out.println("USER: " + p.getName());
+        assertTrue("user name (fakeUser) is not the same as in the Subject: " +
+            p.getName(), "fakeUser".equals(p.getName()));
+      }
+      if(p instanceof Group) {
+        fail("fakeUser should have no groups");
+      }
+    }
+    
   }
 }

Modified: hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/security/authorize/TestConfiguredPolicy.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/security/authorize/TestConfiguredPolicy.java?rev=897029&r1=897028&r2=897029&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/security/authorize/TestConfiguredPolicy.java (original)
+++ hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/security/authorize/TestConfiguredPolicy.java Thu Jan  7 22:04:37 2010
@@ -17,7 +17,13 @@
  */
 package org.apache.hadoop.security.authorize;
 
-import java.security.Permission;
+import java.security.CodeSource;
+import java.security.CodeSigner;
+import java.security.PermissionCollection;
+import java.security.ProtectionDomain;
+import java.net.URL;
+import java.net.NetPermission;
+import java.net.MalformedURLException;
 
 import javax.security.auth.Subject;
 
@@ -36,7 +42,7 @@
   
   private static final String KEY_1 = "test.policy.1";
   private static final String KEY_2 = "test.policy.2";
-  
+
   public static class Protocol1 {
     int i;
   }
@@ -55,11 +61,7 @@
   }
   
   public void testConfiguredPolicy() throws Exception {
-    Configuration conf = new Configuration();
-    conf.set(KEY_1, AccessControlList.WILDCARD_ACL_VALUE);
-    conf.set(KEY_2, USER1 + " " + GROUPS1[0]);
-    
-    ConfiguredPolicy policy = new ConfiguredPolicy(conf, new TestPolicyProvider());
+    ConfiguredPolicy policy = createConfiguredPolicy();
     SecurityUtil.setPolicy(policy);
     
     Subject user1 = 
@@ -79,4 +81,60 @@
     }
     assertTrue(failed);
   }
+
+  /**
+   * Create a configured policy with some keys
+   * @return a new configured policy
+   */
+  private ConfiguredPolicy createConfiguredPolicy() {
+    Configuration conf = new Configuration();
+    conf.set(KEY_1, AccessControlList.WILDCARD_ACL_VALUE);
+    conf.set(KEY_2, USER1 + " " + GROUPS1[0]);
+
+    return new ConfiguredPolicy(conf, new TestPolicyProvider());
+  }
+
+  /**
+   * Create a test code source against a test URL
+   * @return a new code source
+   * @throws MalformedURLException
+   */
+  private CodeSource createCodeSource() throws MalformedURLException {
+    return new CodeSource(new URL("http://hadoop.apache.org"),
+        (CodeSigner[]) null);
+  }
+
+  /**
+   * Assert that a permission collection can have new permissions added
+   * @param permissions the collection to check
+   */
+  private void assertWritable(PermissionCollection permissions) {
+    assertFalse(permissions.isReadOnly());
+    NetPermission netPermission = new NetPermission("something");
+    permissions.add(netPermission);
+  }
+
+  /**
+   * test that the {@link PermissionCollection} returned by
+   * {@link ConfiguredPolicy#getPermissions(CodeSource)} is writeable
+   * @throws Throwable on any failure
+   */
+  public void testPolicyWritable() throws Throwable {
+    ConfiguredPolicy policy = createConfiguredPolicy();
+    CodeSource source = createCodeSource();
+    PermissionCollection permissions = policy.getPermissions(source);
+    assertWritable(permissions);
+  }
+
+  /**
+   * test that the {@link PermissionCollection} returned by
+   * {@link ConfiguredPolicy#getPermissions(ProtectionDomain)} is writeable
+   * @throws Throwable on any failure
+   */
+  public void testProtectionDomainPolicyWritable() throws Throwable {
+    ConfiguredPolicy policy = createConfiguredPolicy();
+    CodeSource source = createCodeSource();
+    PermissionCollection permissions = policy.getPermissions(new ProtectionDomain(source, null));
+    assertWritable(permissions);
+  }
 }

Modified: hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/util/TestGenericOptionsParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/util/TestGenericOptionsParser.java?rev=897029&r1=897028&r2=897029&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/util/TestGenericOptionsParser.java (original)
+++ hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/util/TestGenericOptionsParser.java Thu Jan  7 22:04:37 2010
@@ -19,22 +19,23 @@
 
 import java.io.File;
 import java.io.FileNotFoundException;
+import java.io.IOException;
 import java.net.URI;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-import junit.framework.TestCase;
-
 public class TestGenericOptionsParser extends TestCase {
-  private static File testDir = 
-    new File(System.getProperty("test.build.data", "/tmp"), "generic");
+  File testDir;
+  Configuration conf;
+  FileSystem localFs;
+    
   
   public void testFilesOption() throws Exception {
-    Configuration conf = new Configuration();
     File tmpFile = new File(testDir, "tmpfile");
-    FileSystem localFs = FileSystem.getLocal(conf);
     Path tmpPath = new Path(tmpFile.toString());
     localFs.create(tmpPath);
     String[] args = new String[2];
@@ -74,7 +75,62 @@
       th instanceof FileNotFoundException);
     files = conf2.get("tmpfiles");
     assertNull("files is not null", files);
-    testDir.delete();
+  }
+  
+  @Override
+  protected void setUp() throws Exception {
+    super.setUp();
+    conf = new Configuration();
+    localFs = FileSystem.getLocal(conf);
+    testDir = new File(System.getProperty("test.build.data", "/tmp"), "generic");
+    if(testDir.exists())
+      localFs.delete(new Path(testDir.toString()), true);
   }
 
+  @Override
+  protected void tearDown() throws Exception {
+    super.tearDown();
+    if(testDir.exists()) {
+      localFs.delete(new Path(testDir.toString()), true);
+    }
+  }
+
+  /**
+   * testing the -tokenCacheFile option
+   * @throws IOException
+   */
+  public void testTokenCacheOption() throws IOException {
+    FileSystem localFs = FileSystem.getLocal(conf);
+    
+    File tmpFile = new File(testDir, "tokenCacheFile");
+    if(tmpFile.exists()) {
+      tmpFile.delete();
+    }
+    String[] args = new String[2];
+    // pass the tokenCacheFile option
+    args[0] = "-tokenCacheFile";
+    args[1] = tmpFile.toString();
+    
+    // test non existing file
+    Throwable th = null;
+    try {
+      new GenericOptionsParser(conf, args);
+    } catch (Exception e) {
+      th = e;
+    }
+    assertNotNull(th);
+    assertTrue("FileNotFoundException is not thrown",
+        th instanceof FileNotFoundException);
+    
+    // create file
+    Path tmpPath = new Path(tmpFile.toString());
+    localFs.create(tmpPath);
+    new GenericOptionsParser(conf, args);
+    String fileName = conf.get("tokenCacheFile");
+    assertNotNull("files is null", fileName);
+    assertEquals("files option does not match",
+      localFs.makeQualified(tmpPath).toString(), fileName);
+    
+    localFs.delete(new Path(testDir.getAbsolutePath()), true);
+  }
 }

