Modified: accumulo/branches/ACCUMULO-259/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/TeraSortIngest.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/TeraSortIngest.java?rev=1437726&r1=1437725&r2=1437726&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/TeraSortIngest.java (original)
+++ accumulo/branches/ACCUMULO-259/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/TeraSortIngest.java Wed Jan 23 20:51:59 2013
@@ -26,6 +26,7 @@ import java.util.List;
 import java.util.Random;
 
 import org.apache.accumulo.core.cli.ClientOnRequiredTable;
+import org.apache.accumulo.core.client.BatchWriterConfig;
 import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
@@ -63,7 +64,7 @@ import com.beust.jcommander.Parameter;
  * the same way TeraSort does use 10000000000 rows and 10/10 byte key length and 78/78 byte value length. Along with the 10 byte row id and \r\n this gives you
  * 100 byte row * 10000000000 rows = 1tb. Min/Max ranges for key and value parameters are inclusive/inclusive respectively.
  * 
- *  
+ * 
  */
 public class TeraSortIngest extends Configured implements Tool {
   /**
@@ -84,19 +85,23 @@ public class TeraSortIngest extends Conf
         rowCount = length;
       }
       
+      @Override
       public long getLength() throws IOException {
         return 0;
       }
       
+      @Override
       public String[] getLocations() throws IOException {
         return new String[] {};
       }
       
+      @Override
       public void readFields(DataInput in) throws IOException {
         firstRow = WritableUtils.readVLong(in);
         rowCount = WritableUtils.readVLong(in);
       }
       
+      @Override
       public void write(DataOutput out) throws IOException {
         WritableUtils.writeVLong(out, firstRow);
         WritableUtils.writeVLong(out, rowCount);
@@ -119,8 +124,10 @@ public class TeraSortIngest extends Conf
         totalRows = split.rowCount;
       }
       
+      @Override
       public void close() throws IOException {}
       
+      @Override
       public float getProgress() throws IOException {
         return finishedRows / (float) totalRows;
       }
@@ -148,6 +155,7 @@ public class TeraSortIngest extends Conf
       }
     }
     
+    @Override
     public RecordReader<LongWritable,NullWritable> createRecordReader(InputSplit split, TaskAttemptContext context) throws IOException {
       // reporter.setStatus("Creating record reader");
       return new RangeRecordReader((RangeInputSplit) split);
@@ -156,6 +164,7 @@ public class TeraSortIngest extends Conf
     /**
      * Create the desired number of splits, dividing the number of rows between the mappers.
      */
+    @Override
     public List<InputSplit> getSplits(JobContext job) {
       long totalRows = job.getConfiguration().getLong(NUMROWS, 0);
       int numSplits = job.getConfiguration().getInt(NUMSPLITS, 1);
@@ -305,6 +314,7 @@ public class TeraSortIngest extends Conf
         value.append(filler[(base + valuelen) % 26], 0, valuelen);
     }
     
+    @Override
     public void map(LongWritable row, NullWritable ignored, Context context) throws IOException, InterruptedException {
       context.setStatus("Entering");
       long rowId = row.get();
@@ -344,17 +354,17 @@ public class TeraSortIngest extends Conf
   }
   
   static class Opts extends ClientOnRequiredTable {
-    @Parameter(names="--count", description="number of rows to ingest", 
required=true)
+    @Parameter(names = "--count", description = "number of rows to ingest", 
required = true)
     long numRows;
-    @Parameter(names={"-nk", "--minKeySize"}, description="miniumum key size", 
required=true)
+    @Parameter(names = {"-nk", "--minKeySize"}, description = "miniumum key 
size", required = true)
     int minKeyLength;
-    @Parameter(names={"-xk", "--maxKeySize"}, description="maximum key size", 
required=true)
+    @Parameter(names = {"-xk", "--maxKeySize"}, description = "maximum key 
size", required = true)
     int maxKeyLength;
-    @Parameter(names={"-nv", "--minValueSize"}, description="minimum key 
size", required=true)
+    @Parameter(names = {"-nv", "--minValueSize"}, description = "minimum key 
size", required = true)
     int minValueLength;
-    @Parameter(names={"-xv", "--maxValueSize"}, description="maximum key 
size", required=true)
+    @Parameter(names = {"-xv", "--maxValueSize"}, description = "maximum key 
size", required = true)
     int maxValueLength;
-    @Parameter(names="--splits", description="number of splits to create in 
the table")
+    @Parameter(names = "--splits", description = "number of splits to create 
in the table")
     int splits = 0;
   }
   
@@ -374,7 +384,8 @@ public class TeraSortIngest extends Conf
     
     job.setOutputFormatClass(AccumuloOutputFormat.class);
     opts.setAccumuloConfigs(job);
-    AccumuloOutputFormat.setMaxMutationBufferSize(job.getConfiguration(), 10L * 1000 * 1000);
+    BatchWriterConfig bwConfig = new BatchWriterConfig().setMaxMemory(10L * 1000 * 1000);
+    AccumuloOutputFormat.setBatchWriterOptions(job, bwConfig);
     
     Configuration conf = job.getConfiguration();
     conf.setLong(NUMROWS, opts.numRows);
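
The last hunk above swaps the removed AccumuloOutputFormat.setMaxMutationBufferSize(Configuration, long) for the new BatchWriterConfig builder plus the Job-based setBatchWriterOptions. A minimal sketch of configuring a job's writer under that API; the latency and write-thread settings are illustrative extras, not part of this commit:

  import java.util.concurrent.TimeUnit;

  import org.apache.accumulo.core.client.BatchWriterConfig;
  import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
  import org.apache.hadoop.mapreduce.Job;

  public class WriterOptionsSketch {
    static void configure(Job job) {
      // Each setter returns the config, so options chain fluently.
      BatchWriterConfig bwConfig = new BatchWriterConfig()
          .setMaxMemory(10L * 1000 * 1000)      // buffer ~10 MB of mutations, as in the hunk above
          .setMaxLatency(60, TimeUnit.SECONDS)  // illustrative: flush buffered mutations at least once a minute
          .setMaxWriteThreads(4);               // illustrative: threads used to send mutations to servers
      AccumuloOutputFormat.setBatchWriterOptions(job, bwConfig);
    }
  }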

Modified: accumulo/branches/ACCUMULO-259/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/UniqueColumns.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/UniqueColumns.java?rev=1437726&r1=1437725&r2=1437726&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/UniqueColumns.java (original)
+++ accumulo/branches/ACCUMULO-259/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/UniqueColumns.java Wed Jan 23 20:51:59 2013
@@ -41,17 +41,19 @@ import com.beust.jcommander.Parameter;
  */
 
 /**
- * A simple map reduce job that computes the unique column families and column qualifiers in a table.  This example shows one way to run against an offline table.
+ * A simple map reduce job that computes the unique column families and column qualifiers in a table. This example shows one way to run against an offline
+ * table.
  */
 public class UniqueColumns extends Configured implements Tool {
   
   private static final Text EMPTY = new Text();
   
-  public static class UMapper extends Mapper<Key,Value,Text,Text> {    
+  public static class UMapper extends Mapper<Key,Value,Text,Text> {
     private Text temp = new Text();
     private static final Text CF = new Text("cf:");
     private static final Text CQ = new Text("cq:");
     
+    @Override
     public void map(Key key, Value value, Context context) throws IOException, InterruptedException {
       temp.set(CF);
       ByteSequence cf = key.getColumnFamilyData();
@@ -66,21 +68,21 @@ public class UniqueColumns extends Confi
   }
   
   public static class UReducer extends Reducer<Text,Text,Text,Text> {
+    @Override
     public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
       context.write(key, EMPTY);
     }
   }
   
   static class Opts extends ClientOnRequiredTable {
-    @Parameter(names="--output", description="output directory")
+    @Parameter(names = "--output", description = "output directory")
     String output;
-    @Parameter(names="--reducers", description="number of reducers to use", 
required=true)
+    @Parameter(names = "--reducers", description = "number of reducers to 
use", required = true)
     int reducers;
-    @Parameter(names="--offline", description="run against an offline table")
+    @Parameter(names = "--offline", description = "run against an offline 
table")
     boolean offline = false;
   }
   
-  
   @Override
   public int run(String[] args) throws Exception {
     Opts opts = new Opts();
@@ -90,7 +92,7 @@ public class UniqueColumns extends Confi
     
     Job job = new Job(getConf(), jobName);
     job.setJarByClass(this.getClass());
-
+    
     String clone = opts.tableName;
     Connector conn = null;
     if (opts.offline) {
@@ -104,11 +106,9 @@ public class UniqueColumns extends Confi
       conn.tableOperations().clone(opts.tableName, clone, true, new HashMap<String,String>(), new HashSet<String>());
       conn.tableOperations().offline(clone);
       
-      AccumuloInputFormat.setScanOffline(job.getConfiguration(), true);
+      AccumuloInputFormat.setOfflineTableScan(job, true);
     }
     
-
-    
     job.setInputFormatClass(AccumuloInputFormat.class);
     opts.setAccumuloConfigs(job);
     
@@ -118,9 +118,9 @@ public class UniqueColumns extends Confi
     
     job.setCombinerClass(UReducer.class);
     job.setReducerClass(UReducer.class);
-
+    
     job.setNumReduceTasks(opts.reducers);
-
+    
     job.setOutputFormatClass(TextOutputFormat.class);
     TextOutputFormat.setOutputPath(job, new Path(opts.output));
     
@@ -129,11 +129,10 @@ public class UniqueColumns extends Confi
     if (opts.offline) {
       conn.tableOperations().delete(clone);
     }
-
+    
     return job.isSuccessful() ? 0 : 1;
   }
   
-  
   public static void main(String[] args) throws Exception {
     int res = ToolRunner.run(CachedConfiguration.getInstance(), new UniqueColumns(), args);
     System.exit(res);
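
The setScanOffline-to-setOfflineTableScan rename above is part of the same move to Job-based configuration. The surrounding pattern in run() deserves a second look: clone the table, take the clone offline so the input format reads the table's files directly instead of scanning through tablet servers, then delete the clone when the job is done. A condensed sketch of just that sequence; setInputTableName is taken from the ChunkInputFormatTest change later in this commit, and the helper name is illustrative:

  import java.util.HashMap;
  import java.util.HashSet;

  import org.apache.accumulo.core.client.Connector;
  import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
  import org.apache.hadoop.mapreduce.Job;

  class OfflineScanSketch {
    static void runOffline(Connector conn, Job job, String table, String jobName) throws Exception {
      String clone = table + "_" + jobName + "_" + System.currentTimeMillis();
      conn.tableOperations().clone(table, clone, true, new HashMap<String,String>(), new HashSet<String>());
      conn.tableOperations().offline(clone);

      AccumuloInputFormat.setInputTableName(job, clone);  // scan the offline clone, not the live table
      AccumuloInputFormat.setOfflineTableScan(job, true);

      job.waitForCompletion(true);
      conn.tableOperations().delete(clone);               // clean up the clone afterwards
    }
  }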

Modified: accumulo/branches/ACCUMULO-259/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java?rev=1437726&r1=1437725&r2=1437726&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java (original)
+++ accumulo/branches/ACCUMULO-259/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java Wed Jan 23 20:51:59 2013
@@ -25,14 +25,9 @@ import java.util.concurrent.TimeUnit;
 
 import junit.framework.TestCase;
 
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.BatchWriterConfig;
 import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.TableExistsException;
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.mapreduce.InputFormatBase.RangeInputSplit;
 import org.apache.accumulo.core.client.mock.MockInstance;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
@@ -40,16 +35,24 @@ import org.apache.accumulo.core.data.Val
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.security.ColumnVisibility;
 import org.apache.accumulo.core.security.tokens.UserPassToken;
-import org.apache.accumulo.core.util.ContextFactory;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.log4j.Logger;
+import org.apache.accumulo.core.util.CachedConfiguration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 
 public class ChunkInputFormatTest extends TestCase {
-  private static final Logger log = Logger.getLogger(ChunkInputStream.class);
-  List<Entry<Key,Value>> data;
-  List<Entry<Key,Value>> baddata;
+  private static AssertionError e0 = null;
+  private static AssertionError e1 = null;
+  private static AssertionError e2 = null;
+  private static IOException e3 = null;
+  
+  private static final Authorizations AUTHS = new Authorizations("A", "B", "C", "D");
+  
+  private static List<Entry<Key,Value>> data;
+  private static List<Entry<Key,Value>> baddata;
   
   {
     data = new ArrayList<Entry<Key,Value>>();
@@ -74,7 +77,153 @@ public class ChunkInputFormatTest extend
     assertEquals(e1.getValue(), e2.getValue());
   }
   
-  public void test() throws IOException, InterruptedException, AccumuloException, AccumuloSecurityException, TableExistsException, TableNotFoundException {
+  public static class CIFTester extends Configured implements Tool {
+    public static class TestMapper extends Mapper<List<Entry<Key,Value>>,InputStream,List<Entry<Key,Value>>,InputStream> {
+      int count = 0;
+      
+      @Override
+      protected void map(List<Entry<Key,Value>> key, InputStream value, Context context) throws IOException, InterruptedException {
+        byte[] b = new byte[20];
+        int read;
+        try {
+          switch (count) {
+            case 0:
+              assertEquals(key.size(), 2);
+              entryEquals(key.get(0), data.get(0));
+              entryEquals(key.get(1), data.get(1));
+              assertEquals(read = value.read(b), 8);
+              assertEquals(new String(b, 0, read), "asdfjkl;");
+              assertEquals(read = value.read(b), -1);
+              break;
+            case 1:
+              assertEquals(key.size(), 2);
+              entryEquals(key.get(0), data.get(4));
+              entryEquals(key.get(1), data.get(5));
+              assertEquals(read = value.read(b), 10);
+              assertEquals(new String(b, 0, read), "qwertyuiop");
+              assertEquals(read = value.read(b), -1);
+              break;
+            default:
+              assertTrue(false);
+          }
+        } catch (AssertionError e) {
+          e1 = e;
+        } finally {
+          value.close();
+        }
+        count++;
+      }
+      
+      @Override
+      protected void cleanup(Context context) throws IOException, InterruptedException {
+        try {
+          assertEquals(2, count);
+        } catch (AssertionError e) {
+          e2 = e;
+        }
+      }
+    }
+    
+    public static class TestNoClose extends Mapper<List<Entry<Key,Value>>,InputStream,List<Entry<Key,Value>>,InputStream> {
+      int count = 0;
+      
+      @Override
+      protected void map(List<Entry<Key,Value>> key, InputStream value, Context context) throws IOException, InterruptedException {
+        byte[] b = new byte[5];
+        int read;
+        try {
+          switch (count) {
+            case 0:
+              assertEquals(read = value.read(b), 5);
+              assertEquals(new String(b, 0, read), "asdfj");
+              break;
+            default:
+              assertTrue(false);
+          }
+        } catch (AssertionError e) {
+          e1 = e;
+        }
+        count++;
+        try {
+          context.nextKeyValue();
+          assertTrue(false);
+        } catch (IOException ioe) {
+          e3 = ioe;
+        }
+      }
+    }
+    
+    public static class TestBadData extends Mapper<List<Entry<Key,Value>>,InputStream,List<Entry<Key,Value>>,InputStream> {
+      @Override
+      protected void map(List<Entry<Key,Value>> key, InputStream value, Context context) throws IOException, InterruptedException {
+        byte[] b = new byte[20];
+        try {
+          assertEquals(key.size(), 2);
+          entryEquals(key.get(0), baddata.get(0));
+          entryEquals(key.get(1), baddata.get(1));
+        } catch (AssertionError e) {
+          e0 = e;
+        }
+        try {
+          value.read(b);
+          try {
+            assertTrue(false);
+          } catch (AssertionError e) {
+            e1 = e;
+          }
+        } catch (Exception e) {}
+        try {
+          value.close();
+          try {
+            assertTrue(false);
+          } catch (AssertionError e) {
+            e2 = e;
+          }
+        } catch (Exception e) {}
+      }
+    }
+    
+    @Override
+    public int run(String[] args) throws Exception {
+      if (args.length != 5) {
+        throw new IllegalArgumentException("Usage : " + CIFTester.class.getName() + " <instance name> <user> <pass> <table> <mapperClass>");
+      }
+      
+      String instance = args[0];
+      String user = args[1];
+      String pass = args[2];
+      String table = args[3];
+      
+      Job job = new Job(getConf(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
+      job.setJarByClass(this.getClass());
+      
+      job.setInputFormatClass(ChunkInputFormat.class);
+      
+      ChunkInputFormat.setConnectorInfo(job, new UserPassToken(user, pass));
+      ChunkInputFormat.setInputTableName(job, table);
+      ChunkInputFormat.setScanAuthorizations(job, AUTHS);
+      ChunkInputFormat.setMockInstance(job, instance);
+      
+      @SuppressWarnings("unchecked")
+      Class<? extends Mapper<?,?,?,?>> forName = (Class<? extends Mapper<?,?,?,?>>) Class.forName(args[4]);
+      job.setMapperClass(forName);
+      job.setMapOutputKeyClass(Key.class);
+      job.setMapOutputValueClass(Value.class);
+      job.setOutputFormatClass(NullOutputFormat.class);
+      
+      job.setNumReduceTasks(0);
+      
+      job.waitForCompletion(true);
+      
+      return job.isSuccessful() ? 0 : 1;
+    }
+    
+    public static int main(String[] args) throws Exception {
+      return ToolRunner.run(CachedConfiguration.getInstance(), new CIFTester(), args);
+    }
+  }
+  
+  public void test() throws Exception {
     MockInstance instance = new MockInstance("instance1");
     Connector conn = instance.getConnector(new UserPassToken("root", ""));
     conn.tableOperations().create("test");
@@ -88,44 +237,12 @@ public class ChunkInputFormatTest extend
     }
     bw.close();
     
-    JobContext job = ContextFactory.createJobContext();
-    ChunkInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "test", new Authorizations("A", "B", "C", "D"));
-    ChunkInputFormat.setMockInstance(job.getConfiguration(), "instance1");
-    ChunkInputFormat cif = new ChunkInputFormat();
-    RangeInputSplit ris = new RangeInputSplit();
-    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job.getConfiguration());
-    RecordReader<List<Entry<Key,Value>>,InputStream> rr = cif.createRecordReader(ris, tac);
-    rr.initialize(ris, tac);
-    
-    assertTrue(rr.nextKeyValue());
-    List<Entry<Key,Value>> info = rr.getCurrentKey();
-    InputStream cis = rr.getCurrentValue();
-    byte[] b = new byte[20];
-    int read;
-    assertEquals(info.size(), 2);
-    entryEquals(info.get(0), data.get(0));
-    entryEquals(info.get(1), data.get(1));
-    assertEquals(read = cis.read(b), 8);
-    assertEquals(new String(b, 0, read), "asdfjkl;");
-    assertEquals(read = cis.read(b), -1);
-    cis.close();
-    
-    assertTrue(rr.nextKeyValue());
-    info = rr.getCurrentKey();
-    cis = rr.getCurrentValue();
-    assertEquals(info.size(), 2);
-    entryEquals(info.get(0), data.get(4));
-    entryEquals(info.get(1), data.get(5));
-    assertEquals(read = cis.read(b), 10);
-    assertEquals(new String(b, 0, read), "qwertyuiop");
-    assertEquals(read = cis.read(b), -1);
-    cis.close();
-    
-    assertFalse(rr.nextKeyValue());
+    assertEquals(0, CIFTester.main(new String[] {"instance1", "root", "", "test", CIFTester.TestMapper.class.getName()}));
+    assertNull(e1);
+    assertNull(e2);
   }
   
-  public void testErrorOnNextWithoutClose() throws IOException, InterruptedException, AccumuloException, AccumuloSecurityException, TableNotFoundException,
-      TableExistsException {
+  public void testErrorOnNextWithoutClose() throws Exception {
     MockInstance instance = new MockInstance("instance2");
     Connector conn = instance.getConnector(new UserPassToken("root", ""));
     conn.tableOperations().create("test");
@@ -139,33 +256,13 @@ public class ChunkInputFormatTest extend
     }
     bw.close();
     
-    JobContext job = ContextFactory.createJobContext();
-    ChunkInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "test", new Authorizations("A", "B", "C", "D"));
-    ChunkInputFormat.setMockInstance(job.getConfiguration(), "instance2");
-    ChunkInputFormat cif = new ChunkInputFormat();
-    RangeInputSplit ris = new RangeInputSplit();
-    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job.getConfiguration());
-    RecordReader<List<Entry<Key,Value>>,InputStream> crr = cif.createRecordReader(ris, tac);
-    crr.initialize(ris, tac);
-    
-    assertTrue(crr.nextKeyValue());
-    InputStream cis = crr.getCurrentValue();
-    byte[] b = new byte[5];
-    int read;
-    assertEquals(read = cis.read(b), 5);
-    assertEquals(new String(b, 0, read), "asdfj");
-    
-    try {
-      crr.nextKeyValue();
-      assertNotNull(null);
-    } catch (Exception e) {
-      log.debug("EXCEPTION " + e.getMessage());
-      assertNull(null);
-    }
+    assertEquals(1, CIFTester.main(new String[] {"instance2", "root", "", "test", CIFTester.TestNoClose.class.getName()}));
+    assertNull(e1);
+    assertNull(e2);
+    assertNotNull(e3);
   }
   
-  public void testInfoWithoutChunks() throws IOException, InterruptedException, AccumuloException, AccumuloSecurityException, TableNotFoundException,
-      TableExistsException {
+  public void testInfoWithoutChunks() throws Exception {
     MockInstance instance = new MockInstance("instance3");
     Connector conn = instance.getConnector(new UserPassToken("root", ""));
     conn.tableOperations().create("test");
@@ -178,35 +275,9 @@ public class ChunkInputFormatTest extend
     }
     bw.close();
     
-    JobContext job = ContextFactory.createJobContext();
-    ChunkInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "test", new Authorizations("A", "B", "C", "D"));
-    ChunkInputFormat.setMockInstance(job.getConfiguration(), "instance3");
-    ChunkInputFormat cif = new ChunkInputFormat();
-    RangeInputSplit ris = new RangeInputSplit();
-    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job.getConfiguration());
-    RecordReader<List<Entry<Key,Value>>,InputStream> crr = cif.createRecordReader(ris, tac);
-    crr.initialize(ris, tac);
-    
-    assertTrue(crr.nextKeyValue());
-    List<Entry<Key,Value>> info = crr.getCurrentKey();
-    InputStream cis = crr.getCurrentValue();
-    byte[] b = new byte[20];
-    assertEquals(info.size(), 2);
-    entryEquals(info.get(0), baddata.get(0));
-    entryEquals(info.get(1), baddata.get(1));
-    try {
-      cis.read(b);
-      assertNotNull(null);
-    } catch (Exception e) {
-      log.debug("EXCEPTION " + e.getMessage());
-      assertNull(null);
-    }
-    try {
-      cis.close();
-      assertNotNull(null);
-    } catch (Exception e) {
-      log.debug("EXCEPTION " + e.getMessage());
-      assertNull(null);
-    }
+    assertEquals(0, CIFTester.main(new String[] {"instance3", "root", "", "test", CIFTester.TestBadData.class.getName()}));
+    assertNull(e0);
+    assertNull(e1);
+    assertNull(e2);
   }
 }
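
A note on the shape of the rewritten test: each case now runs a real MapReduce job over a MockInstance via ToolRunner instead of hand-driving a RecordReader through the removed ContextFactory. JUnit cannot see assertions that fail inside a mapper, so the mappers catch AssertionError into the static fields e0 through e3, which the test methods inspect after the job completes. A stripped-down sketch of that pattern with illustrative names; it works only because mock/local jobs run in the test's own JVM:

  import java.io.IOException;

  import org.apache.hadoop.io.LongWritable;
  import org.apache.hadoop.io.NullWritable;
  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.mapreduce.Mapper;

  public class InMapperAssertions {
    // Visible to the test thread because mapper and test share one JVM.
    static AssertionError mapError = null;

    public static class CheckingMapper extends Mapper<LongWritable,Text,NullWritable,NullWritable> {
      @Override
      protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        try {
          junit.framework.Assert.assertTrue(value.getLength() > 0);
        } catch (AssertionError e) {
          mapError = e; // stash for the test thread rather than failing the job
        }
      }
    }
    // In the test, after job.waitForCompletion(true): assertNull(mapError);
  }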

Modified: accumulo/branches/ACCUMULO-259/fate/src/main/java/org/apache/accumulo/fate/TStore.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/fate/src/main/java/org/apache/accumulo/fate/TStore.java?rev=1437726&r1=1437725&r2=1437726&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/fate/src/main/java/org/apache/accumulo/fate/TStore.java (original)
+++ accumulo/branches/ACCUMULO-259/fate/src/main/java/org/apache/accumulo/fate/TStore.java Wed Jan 23 20:51:59 2013
@@ -135,7 +135,6 @@ public interface TStore<T> {
   /**
    * list all transaction ids in store
    * 
-   * @return
    */
   
   public List<Long> list();

Propchange: accumulo/branches/ACCUMULO-259/fate/src/main/java/org/apache/accumulo/fate/ZooStore.java
------------------------------------------------------------------------------
  Merged /accumulo/trunk/fate/src/main/java/org/apache/accumulo/fate/ZooStore.java:r1433135-1437607

Propchange: accumulo/branches/ACCUMULO-259/fate/src/main/java/org/apache/accumulo/fate/zookeeper/ZooSession.java
------------------------------------------------------------------------------
  Merged /accumulo/trunk/fate/src/main/java/org/apache/accumulo/fate/zookeeper/ZooSession.java:r1433135-1437607

Modified: accumulo/branches/ACCUMULO-259/fate/src/main/java/org/apache/accumulo/fate/zookeeper/ZooUtil.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/fate/src/main/java/org/apache/accumulo/fate/zookeeper/ZooUtil.java?rev=1437726&r1=1437725&r2=1437726&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/fate/src/main/java/org/apache/accumulo/fate/zookeeper/ZooUtil.java (original)
+++ accumulo/branches/ACCUMULO-259/fate/src/main/java/org/apache/accumulo/fate/zookeeper/ZooUtil.java Wed Jan 23 20:51:59 2013
@@ -16,6 +16,7 @@
  */
 package org.apache.accumulo.fate.zookeeper;
 
+import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -59,7 +60,7 @@ public class ZooUtil {
       else
         path = root + "/" + sa[0].substring(0, lastSlash);
       node = sa[0].substring(lastSlash + 1);
-      eid = Long.parseLong(sa[1], 16);
+      eid = new BigInteger(sa[1], 16).longValue();
     }
     
     public LockID(String path, String node, long eid) {
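
The parseLong-to-BigInteger change lets the eid field accept a full unsigned 64-bit hex string: Long.parseLong(s, 16) throws NumberFormatException for anything above Long.MAX_VALUE, while BigInteger.longValue() keeps the low 64 bits and wraps to the signed two's-complement value. A small self-contained demonstration:

  import java.math.BigInteger;

  public class UnsignedHexParse {
    public static void main(String[] args) {
      String hex = "ffffffffffffffff"; // 64-bit value with the high bit set

      // Wraps to the signed representation: prints -1
      System.out.println(new BigInteger(hex, 16).longValue());

      try {
        Long.parseLong(hex, 16); // exceeds Long.MAX_VALUE, so this throws
      } catch (NumberFormatException e) {
        System.out.println("Long.parseLong rejects it: " + e.getMessage());
      }
    }
  }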

Propchange: accumulo/branches/ACCUMULO-259/packages/
------------------------------------------------------------------------------
  Merged /accumulo/trunk/packages:r1433135-1437607

Modified: accumulo/branches/ACCUMULO-259/pom.xml
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/pom.xml?rev=1437726&r1=1437725&r2=1437726&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/pom.xml (original)
+++ accumulo/branches/ACCUMULO-259/pom.xml Wed Jan 23 20:51:59 2013
@@ -53,6 +53,7 @@
     <module>start</module>
     <module>examples</module>
     <module>assemble</module>
+    <module>proxy</module>
     <module>test</module>
   </modules>
 
@@ -144,7 +145,7 @@
             <configuration>
               <outputDirectory>../lib</outputDirectory>
               <!-- just grab the non-provided runtime dependencies -->
-              <includeArtifactIds>commons-collections,commons-configuration,commons-io,commons-lang,jline,log4j,libthrift,commons-logging,commons-logging-api,commons-vfs2,gson,jcommander</includeArtifactIds>
+              <includeArtifactIds>commons-collections,commons-configuration,commons-io,commons-lang,jline,log4j,libthrift,commons-logging,commons-logging-api,commons-vfs2,gson,jcommander,guava</includeArtifactIds>
               <excludeTransitive>true</excludeTransitive>
             </configuration>
           </execution>
@@ -208,6 +209,7 @@
           <configuration>
             <formats>
               <format>xml</format>
+              <format>html</format>
             </formats>
           </configuration>
         </plugin>
@@ -531,6 +533,7 @@
         <slf4j.version>1.6.1</slf4j.version>
         <hadoop.version>2.0.2-alpha</hadoop.version>
         <avro.version>1.5.3</avro.version>
+        <httpclient.version>3.1</httpclient.version>
       </properties>
       <dependencyManagement>
         <dependencies>
@@ -546,6 +549,12 @@
             <version>${avro.version}</version>
             <scope>provided</scope>
           </dependency>
+          <dependency>
+            <groupId>commons-httpclient</groupId>
+            <artifactId>commons-httpclient</artifactId>
+            <version>${httpclient.version}</version>
+            <scope>provided</scope>
+          </dependency>
         </dependencies>
       </dependencyManagement>
     </profile>

Propchange: accumulo/branches/ACCUMULO-259/proxy/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Wed Jan 23 20:51:59 2013
@@ -0,0 +1,4 @@
+.classpath
+.project
+target
+.settings

Copied: accumulo/branches/ACCUMULO-259/proxy/src/main/java/org/apache/accumulo/proxy/ProxyServer.java (from r1437607, accumulo/trunk/proxy/src/main/java/org/apache/accumulo/proxy/ProxyServer.java)
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/proxy/src/main/java/org/apache/accumulo/proxy/ProxyServer.java?p2=accumulo/branches/ACCUMULO-259/proxy/src/main/java/org/apache/accumulo/proxy/ProxyServer.java&p1=accumulo/trunk/proxy/src/main/java/org/apache/accumulo/proxy/ProxyServer.java&r1=1437607&r2=1437726&rev=1437726&view=diff
==============================================================================
--- accumulo/trunk/proxy/src/main/java/org/apache/accumulo/proxy/ProxyServer.java (original)
+++ accumulo/branches/ACCUMULO-259/proxy/src/main/java/org/apache/accumulo/proxy/ProxyServer.java Wed Jan 23 20:51:59 2013
@@ -63,6 +63,7 @@ import org.apache.accumulo.core.security
 import org.apache.accumulo.core.security.ColumnVisibility;
 import org.apache.accumulo.core.security.SystemPermission;
 import org.apache.accumulo.core.security.TablePermission;
+import org.apache.accumulo.core.security.tokens.UserPassToken;
 import org.apache.accumulo.core.util.ByteBufferUtil;
 import org.apache.accumulo.core.util.TextUtil;
 import org.apache.accumulo.proxy.thrift.AccumuloProxy;
@@ -578,7 +579,7 @@ public class ProxyServer implements Accu
   @Override
   public void createUser(UserPass userpass, String user, ByteBuffer password) throws TException {
     try {
-      getConnector(userpass).securityOperations().createUser(user, password.array());
+      getConnector(userpass).securityOperations().createUser(new UserPassToken(user, password));
     } catch (Exception e) {
       throw translateException(e);
     }

Copied: accumulo/branches/ACCUMULO-259/proxy/src/main/java/org/apache/accumulo/proxy/thrift/AccumuloException.java (from r1437607, accumulo/trunk/proxy/src/main/java/org/apache/accumulo/proxy/thrift/AccumuloException.java)
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/proxy/src/main/java/org/apache/accumulo/proxy/thrift/AccumuloException.java?p2=accumulo/branches/ACCUMULO-259/proxy/src/main/java/org/apache/accumulo/proxy/thrift/AccumuloException.java&p1=accumulo/trunk/proxy/src/main/java/org/apache/accumulo/proxy/thrift/AccumuloException.java&r1=1437607&r2=1437726&rev=1437726&view=diff
==============================================================================
--- accumulo/trunk/proxy/src/main/java/org/apache/accumulo/proxy/thrift/AccumuloException.java (original)
+++ accumulo/branches/ACCUMULO-259/proxy/src/main/java/org/apache/accumulo/proxy/thrift/AccumuloException.java Wed Jan 23 20:51:59 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *
@@ -30,7 +46,7 @@ import java.util.Arrays;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class AccumuloException extends TException implements org.apache.thrift.TBase<AccumuloException, AccumuloException._Fields>, java.io.Serializable, Cloneable {
+@SuppressWarnings("all") public class AccumuloException extends TException implements org.apache.thrift.TBase<AccumuloException, AccumuloException._Fields>, java.io.Serializable, Cloneable {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AccumuloException");

   private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)1);
@@ -44,7 +60,7 @@ public class AccumuloException extends T
   public String msg; // required
 
   /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
-  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+  @SuppressWarnings("all") public enum _Fields implements org.apache.thrift.TFieldIdEnum {
     MSG((short)1, "msg");
 
     private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

