Author: jimk
Date: Fri Aug 31 23:22:01 2007
New Revision: 571711

URL: http://svn.apache.org/viewvc?rev=571711&view=rev
Log:
HADOOP-1821 Replace all String.getBytes() with String.getBytes("UTF-8")
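For reference, the pattern this commit applies throughout is small but easy to get wrong. String.getBytes() with no argument uses the JVM's platform-default charset, so the same string can serialize to different bytes on differently configured machines; the overload that names a charset is deterministic but declares a checked exception. A minimal standalone sketch of the before/after (illustration only, not code from the patch):

    import java.io.UnsupportedEncodingException;

    public class Utf8Bytes {
      // Before: uses the platform-default charset, so the bytes written by
      // one host may not match the bytes another host expects to read.
      static byte[] unsafeBytes(String s) {
        return s.getBytes();
      }

      // After: always encode as UTF-8.  String.getBytes(String) declares
      // UnsupportedEncodingException, but UTF-8 is required to be present
      // in every Java runtime, so the catch block can never actually run.
      static byte[] utf8Bytes(String s) {
        try {
          return s.getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) {
          throw new RuntimeException("UTF-8 not supported?", e);
        }
      }
    }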
Modified:
    lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConstants.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HGlobals.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestBatchUpdate.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestGet.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHBaseCluster.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHLog.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHMemcache.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHStoreFile.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner2.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestSplit.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestTableMapReduce.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/filter/TestRegExpRowFilter.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/filter/TestRowFilterSet.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/onelab/test/StringKey.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/onelab/test/TestFilter.java

Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt Fri Aug 31 23:22:01 2007
@@ -28,6 +28,7 @@
                 (Ning Li via Stack)
     HADOOP-1800 output should default utf8 encoding
     HADOOP-1814 TestCleanRegionServerExit fails too often on Hudson
+    HADOOP-1821 Replace all String.getBytes() with String.getBytes("UTF-8")
 
 IMPROVEMENTS
     HADOOP-1737 Make HColumnDescriptor data publically members settable

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConstants.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConstants.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConstants.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConstants.java Fri Aug 31 23:22:01 2007
@@ -19,7 +19,6 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -86,11 +85,24 @@
 
   // Always store the location of the root table's HRegion.
   // This HRegion is never split.
-  
+
   // region name = table + startkey + regionid. This is the row key.
   // each row in the root and meta tables describes exactly 1 region
   // Do we ever need to know all the information that we are storing?
+
+  // Note that the name of the root table starts with "-" and the name of the
+  // meta table starts with "." Why? It's a trick. It turns out that when we
+  // store region names in memory, we use a SortedMap. Since "-" sorts before
+  // "." (and since no other table name can start with either of these
+  // characters), the root region will always be the first entry in such a Map,
+  // followed by all the meta regions (which will be ordered by their starting
+  // row key as well), followed by all user tables. So when the Master is
+  // choosing regions to assign, it will always choose the root region first,
+  // followed by the meta regions, followed by user regions. Since the root
+  // and meta regions always need to be on-line, this ensures that they will
+  // be the first to be reassigned if the server(s) they are being served by
+  // should go down.
+
   /** The root table's name. */
   static final Text ROOT_TABLE_NAME = new Text("-ROOT-");
@@ -133,11 +145,4 @@
 
   /** When we encode strings, we always specify UTF8 encoding */
   static final String UTF8_ENCODING = "UTF-8";
 
-  /** Value stored for a deleted item */
-  static final ImmutableBytesWritable DELETE_BYTES =
-    new ImmutableBytesWritable("HBASE::DELETEVAL".getBytes());
-
-  /** Value written to HLog on a complete cache flush */
-  static final ImmutableBytesWritable COMPLETE_CACHEFLUSH =
-    new ImmutableBytesWritable("HBASE::CACHEFLUSH".getBytes());
 }
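The long comment added to HConstants above explains the naming trick: region names live in a SortedMap, and since "-" sorts before "." and user table names may begin with neither character, the root region always sorts first, then the meta regions, then user regions. The ordering is easy to verify with a toy demo (the region-name layout below is simplified and the user table name invented; only the leading characters matter):

    import java.util.SortedMap;
    import java.util.TreeMap;

    public class SortOrderDemo {
      public static void main(String[] args) {
        // "-" is 0x2d and "." is 0x2e, so lexicographic order puts the
        // root region first, meta regions next, and user regions last.
        SortedMap<String, String> regions = new TreeMap<String, String>();
        regions.put("users,,2", "a user region");   // hypothetical user table
        regions.put(".META.,,1", "a meta region");
        regions.put("-ROOT-,,0", "the root region");
        System.out.println(regions.keySet());
        // prints: [-ROOT-,,0, .META.,,1, users,,2]
      }
    }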
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HGlobals.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HGlobals.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HGlobals.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HGlobals.java Fri Aug 31 23:22:01 2007
@@ -19,14 +19,28 @@
  */
 package org.apache.hadoop.hbase;
 
+import java.io.UnsupportedEncodingException;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+
 /**
- * Global values used for finding and scanning the root and meta tables.
+ * Global values that require initialization that cannot be done in HConstants
 */
 public class HGlobals implements HConstants {
+
+  /** table descriptor for root table */
+  public static HTableDescriptor rootTableDesc = null;
+
+  /** region info for the root region */
+  public static HRegionInfo rootRegionInfo = null;
+
+  /** table descriptor for meta table */
+  public static HTableDescriptor metaTableDesc = null;
-  static HTableDescriptor rootTableDesc = null;
-  static HRegionInfo rootRegionInfo = null;
-  static HTableDescriptor metaTableDesc = null;
+
+  /** Value stored for a deleted item */
+  public static ImmutableBytesWritable deleteBytes = null;
+
+  /** Value written to HLog on a complete cache flush */
+  public static ImmutableBytesWritable completeCacheFlush = null;
 
   static {
     rootTableDesc = new HTableDescriptor(ROOT_TABLE_NAME.toString());
@@ -38,5 +52,17 @@
     metaTableDesc = new HTableDescriptor(META_TABLE_NAME.toString());
     metaTableDesc.addFamily(new HColumnDescriptor(COLUMN_FAMILY, 1,
       HColumnDescriptor.CompressionType.NONE, false, Integer.MAX_VALUE, null));
+
+    try {
+      deleteBytes =
+        new ImmutableBytesWritable("HBASE::DELETEVAL".getBytes(UTF8_ENCODING));
+
+      completeCacheFlush =
+        new ImmutableBytesWritable("HBASE::CACHEFLUSH".getBytes(UTF8_ENCODING));
+
+    } catch (UnsupportedEncodingException e) {
+      assert(false);
+    }
+  }
 }
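The two sentinel values move from HConstants to HGlobals because getBytes(UTF8_ENCODING) throws the checked UnsupportedEncodingException: HGlobals implements HConstants, which suggests HConstants is a constants holder whose field initializers cannot be wrapped in a try/catch, while HGlobals already has a static block. A sketch of the same pattern with one caveat spelled out (class and field names invented; the patch itself uses assert(false) in the catch):

    import java.io.UnsupportedEncodingException;

    public class EncodingConstants {
      public static final byte[] DELETE_SENTINEL;

      // A static block lets us call the checked-exception variant of
      // getBytes() while still exposing an effectively-constant value.
      static {
        byte[] b;
        try {
          b = "HBASE::DELETEVAL".getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) {
          // UTF-8 is mandatory in every JVM, so this is unreachable.  Note
          // that assert(false), as used in the patch, is a no-op unless the
          // JVM runs with -ea and would leave the constant null; throwing
          // here fails fast instead.
          throw new ExceptionInInitializerError(e);
        }
        DELETE_SENTINEL = b;
      }
    }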
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java Fri Aug 31 23:22:01 2007
@@ -433,7 +433,7 @@
     }
 
     writer.append(new HLogKey(regionName, tableName, HLog.METAROW, logSeqId),
-      new HLogEdit(HLog.METACOLUMN, COMPLETE_CACHEFLUSH.get(),
+      new HLogEdit(HLog.METACOLUMN, HGlobals.completeCacheFlush.get(),
         System.currentTimeMillis()));
     numEntries.getAndIncrement();

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java Fri Aug 31 23:22:01 2007
@@ -258,7 +258,7 @@
     for (Map.Entry<HStoreKey, byte []> es: tailMap.entrySet()) {
       HStoreKey itKey = es.getKey();
       if (itKey.matchesRowCol(curKey)) {
-        if(HConstants.DELETE_BYTES.compareTo(es.getValue()) == 0) {
+        if(HGlobals.deleteBytes.compareTo(es.getValue()) == 0) {
           // TODO: Shouldn't this be a continue rather than a break? Perhaps
           // the intent is that this DELETE_BYTES is meant to suppress older
           // info -- see 5.4 Compactions in BigTable -- but how does this jibe

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java Fri Aug 31 23:22:01 2007
@@ -1103,7 +1103,7 @@
    * @throws IOException
    */
   public void put(long lockid, Text targetCol, byte [] val) throws IOException {
-    if (DELETE_BYTES.compareTo(val) == 0) {
+    if (HGlobals.deleteBytes.compareTo(val) == 0) {
       throw new IOException("Cannot insert value: " + val);
     }
     localput(lockid, targetCol, val);
@@ -1117,7 +1117,7 @@
    * @throws IOException
    */
   public void delete(long lockid, Text targetCol) throws IOException {
-    localput(lockid, targetCol, DELETE_BYTES.get());
+    localput(lockid, targetCol, HGlobals.deleteBytes.get());
   }
 
   /**

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java Fri Aug 31 23:22:01 2007
@@ -1064,7 +1064,7 @@
         for(Map.Entry<Text, byte []> e: results.entrySet()) {
           HStoreKey k = new HStoreKey(key.getRow(), e.getKey(), key.getTimestamp());
           byte [] val = e.getValue();
-          if (DELETE_BYTES.compareTo(val) == 0) {
+          if (HGlobals.deleteBytes.compareTo(val) == 0) {
            // Column value is deleted. Don't return it.
            continue;
          }
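All of these call sites treat deleteBytes as a tombstone: a delete is written as an ordinary cell whose value is the sentinel, and every reader (memcache, store, region server, filters) compares fetched values against it before returning them. A condensed, self-contained sketch of the read-side check (invented names, not the HBase classes):

    import java.io.UnsupportedEncodingException;
    import java.util.Arrays;
    import java.util.Map;

    public class TombstoneDemo {
      static final byte[] TOMBSTONE;
      static {
        try {
          TOMBSTONE = "HBASE::DELETEVAL".getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) {
          throw new ExceptionInInitializerError(e);  // unreachable for UTF-8
        }
      }

      // Return the value for a column, or null if it was deleted: a stored
      // value equal to the tombstone means "this cell has been deleted".
      static byte[] valueOrNull(Map<String, byte[]> row, String column) {
        byte[] val = row.get(column);
        if (val == null || Arrays.equals(TOMBSTONE, val)) {
          return null;
        }
        return val;
      }
    }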
+ "(" + String.format("%.2f", (end - start) * 0.001) + " sec)" : + ""; } } Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java?rev=571711&r1=571710&r2=571711&view=diff ============================================================================== --- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java (original) +++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java Fri Aug 31 23:22:01 2007 @@ -33,7 +33,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HGlobals; import org.apache.hadoop.io.Text; /** @@ -176,7 +176,7 @@ } } if (nullColumns.contains(colKey)) { - if (data != null && !Arrays.equals(HConstants.DELETE_BYTES.get(), data)) { + if (data != null && !Arrays.equals(HGlobals.deleteBytes.get(), data)) { if (LOG.isDebugEnabled()) { LOG.debug("filter returning true for rowKey: " + rowKey + " colKey: " + colKey); @@ -198,7 +198,7 @@ public boolean filterNotNull(final TreeMap<Text, byte[]> columns) { for (Entry<Text, byte[]> col : columns.entrySet()) { if (nullColumns.contains(col.getKey()) - && !Arrays.equals(HConstants.DELETE_BYTES.get(), col.getValue())) { + && !Arrays.equals(HGlobals.deleteBytes.get(), col.getValue())) { if (LOG.isDebugEnabled()) { LOG.debug("filterNotNull returning true for colKey: " + col.getKey() + ", column should be null."); Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java?rev=571711&r1=571710&r2=571711&view=diff ============================================================================== --- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java (original) +++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java Fri Aug 31 23:22:01 2007 @@ -20,9 +20,11 @@ package org.apache.hadoop.hbase.mapred; import java.io.IOException; +import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.Map; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -147,7 +149,11 @@ if(i > 0) { sb.append(" "); } - sb.append(new String(vals[i])); + try { + sb.append(new String(vals[i], HConstants.UTF8_ENCODING)); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); + } } return new Text(sb.toString()); } Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java?rev=571711&r1=571710&r2=571711&view=diff ============================================================================== --- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java (original) +++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java Fri Aug 31 23:22:01 2007 @@ -159,7 +159,7 @@ for (char d = secondCharStart; d <= LAST_CHAR; d++) { for (char e = thirdCharStart; e <= 
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java Fri Aug 31 23:22:01 2007
@@ -159,7 +159,7 @@
       for (char d = secondCharStart; d <= LAST_CHAR; d++) {
         for (char e = thirdCharStart; e <= LAST_CHAR; e++) {
           byte [] bytes = new byte [] {(byte)c, (byte)d, (byte)e};
-          Text t = new Text(new String(bytes));
+          Text t = new Text(new String(bytes, HConstants.UTF8_ENCODING));
           if (endKey != null && endKey.getLength() > 0
               && endKey.compareTo(t) <= 0) {
             break EXIT;

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java Fri Aug 31 23:22:01 2007
@@ -21,6 +21,7 @@
 
 import java.io.IOException;
 import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
 import java.text.SimpleDateFormat;
 import java.util.Arrays;
 import java.util.Date;
@@ -312,7 +313,13 @@
       while(val.length() < ROW_LENGTH) {
         val.append(Long.toString(this.rand.nextLong()));
       }
-      return val.toString().getBytes();
+      byte[] value = null;
+      try {
+        value = val.toString().getBytes(HConstants.UTF8_ENCODING);
+      } catch (UnsupportedEncodingException e) {
+        assert(false);
+      }
+      return value;
     }
 
     private String generateStatus(final int sr, final int i, final int lr) {

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestBatchUpdate.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestBatchUpdate.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestBatchUpdate.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestBatchUpdate.java Fri Aug 31 23:22:01 2007
@@ -99,7 +99,7 @@
       while(scanner.next(key, results)) {
         for(Map.Entry<Text, byte[]> e: results.entrySet()) {
           System.out.println(key + ": row: " + e.getKey() + " value: " +
-            new String(e.getValue()));
+            new String(e.getValue(), HConstants.UTF8_ENCODING));
         }
       }
     } catch (Exception e) {

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestGet.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestGet.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestGet.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestGet.java Fri Aug 31 23:22:01 2007
@@ -69,7 +69,7 @@
   }
 
   /**
-   * Constructor
+   * the test
    * @throws IOException
    */
   public void testGet() throws IOException {
@@ -144,14 +144,14 @@
 
       lockid = r.startUpdate(ROW_KEY);
 
       r.put(lockid, new Text(HConstants.COLUMN_FAMILY + "region"),
-        "region2".getBytes());
+        "region2".getBytes(HConstants.UTF8_ENCODING));
 
       String otherServerName = "bar.foo.com:4321";
       r.put(lockid, HConstants.COL_SERVER,
         Writables.stringToBytes(new HServerAddress(otherServerName).toString()));
 
       r.put(lockid, new Text(HConstants.COLUMN_FAMILY + "junk"),
-        "junk".getBytes());
+        "junk".getBytes(HConstants.UTF8_ENCODING));
 
       r.commit(lockid, System.currentTimeMillis());

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHBaseCluster.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHBaseCluster.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHBaseCluster.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHBaseCluster.java Fri Aug 31 23:22:01 2007
@@ -90,8 +90,10 @@
 
     for (int k = FIRST_ROW; k <= NUM_VALS; k++) {
       long writeid = table.startUpdate(new Text("row_" + k));
-      table.put(writeid, CONTENTS_BASIC, (CONTENTSTR + k).getBytes());
-      table.put(writeid, new Text(ANCHORNUM + k), (ANCHORSTR + k).getBytes());
+      table.put(writeid, CONTENTS_BASIC,
+        (CONTENTSTR + k).getBytes(HConstants.UTF8_ENCODING));
+      table.put(writeid, new Text(ANCHORNUM + k),
+        (ANCHORSTR + k).getBytes(HConstants.UTF8_ENCODING));
       table.commit(writeid);
     }
     System.out.println("Write " + NUM_VALS + " rows. Elapsed time: "
@@ -107,14 +109,14 @@
       byte bodydata[] = table.get(rowlabel, CONTENTS_BASIC);
       assertNotNull(bodydata);
-      String bodystr = new String(bodydata).toString().trim();
+      String bodystr = new String(bodydata, HConstants.UTF8_ENCODING).trim();
       String teststr = CONTENTSTR + k;
       assertEquals("Incorrect value for key: (" + rowlabel + "," + CONTENTS_BASIC
           + "), expected: '" + teststr + "' got: '" + bodystr + "'",
           bodystr, teststr);
       collabel = new Text(ANCHORNUM + k);
       bodydata = table.get(rowlabel, collabel);
-      bodystr = new String(bodydata).toString().trim();
+      bodystr = new String(bodydata, HConstants.UTF8_ENCODING).trim();
       teststr = ANCHORSTR + k;
       assertEquals("Incorrect value for key: (" + rowlabel + "," + collabel
           + "), expected: '" + teststr + "' got: '" + bodystr + "'",
@@ -145,7 +147,7 @@
       for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
         Text col = it.next();
         byte val[] = curVals.get(col);
-        String curval = new String(val).trim();
+        String curval = new String(val, HConstants.UTF8_ENCODING).trim();
         if(col.compareTo(CONTENTS_BASIC) == 0) {
           assertTrue("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHLog.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHLog.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHLog.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHLog.java Fri Aug 31 23:22:01 2007
@@ -84,7 +84,7 @@
         assertEquals(tableName, key.getTablename());
         assertEquals(HLog.METAROW, key.getRow());
         assertEquals(HLog.METACOLUMN, val.getColumn());
-        assertEquals(0, COMPLETE_CACHEFLUSH.compareTo(val.getVal()));
+        assertEquals(0, HGlobals.completeCacheFlush.compareTo(val.getVal()));
         System.out.println(key + " " + val);
       }
     } finally {

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHMemcache.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHMemcache.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHMemcache.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHMemcache.java Fri Aug 31 23:22:01 2007
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.io.UnsupportedEncodingException;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.TreeMap;
@@ -85,7 +86,11 @@
       TreeMap<Text, byte []> columns = new TreeMap<Text, byte []>();
       for (int ii = 0; ii < COLUMNS_COUNT; ii++) {
         Text k = getColumnName(i, ii);
-        columns.put(k, k.toString().getBytes());
+        try {
+          columns.put(k, k.toString().getBytes(HConstants.UTF8_ENCODING));
+        } catch (UnsupportedEncodingException e) {
+          fail();
+        }
       }
       hmc.add(getRowName(i), columns, System.currentTimeMillis());
     }
@@ -147,7 +152,7 @@
   }
 
   private void isExpectedRow(final int rowIndex,
-      TreeMap<Text, byte []> row) {
+      TreeMap<Text, byte []> row) throws UnsupportedEncodingException {
     int i = 0;
     for (Text colname: row.keySet()) {
       String expectedColname =
@@ -159,13 +164,15 @@
       // for BytesWritable. For comparison, convert bytes to
       // String and trim to remove trailing null bytes.
       byte [] value = row.get(colname);
-      String colvalueStr = new String(value).trim();
+      String colvalueStr = new String(value, HConstants.UTF8_ENCODING).trim();
       assertEquals("Content", colnameStr, colvalueStr);
     }
   }
 
-  /** Test getFull from memcache */
-  public void testGetFull() {
+  /** Test getFull from memcache
+   * @throws UnsupportedEncodingException
+   */
+  public void testGetFull() throws UnsupportedEncodingException {
     addRows(this.hmemcache);
     for (int i = 0; i < ROW_COUNT; i++) {
       HStoreKey hsk = new HStoreKey(getRowName(i));

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHRegion.java Fri Aug 31 23:22:01 2007
@@ -121,8 +121,10 @@
 
     for (int k = FIRST_ROW; k <= NUM_VALS; k++) {
       long writeid = region.startUpdate(new Text("row_" + k));
-      region.put(writeid, CONTENTS_BASIC, (CONTENTSTR + k).getBytes());
-      region.put(writeid, new Text(ANCHORNUM + k), (ANCHORSTR + k).getBytes());
+      region.put(writeid, CONTENTS_BASIC,
+        (CONTENTSTR + k).getBytes(HConstants.UTF8_ENCODING));
+      region.put(writeid, new Text(ANCHORNUM + k),
+        (ANCHORSTR + k).getBytes(HConstants.UTF8_ENCODING));
       region.commit(writeid, System.currentTimeMillis());
     }
     System.out.println("Write " + NUM_VALS + " rows. Elapsed time: "
@@ -147,14 +149,14 @@
       byte [] bodydata = region.get(rowlabel, CONTENTS_BASIC);
       assertNotNull(bodydata);
-      String bodystr = new String(bodydata).toString().trim();
+      String bodystr = new String(bodydata, HConstants.UTF8_ENCODING).trim();
       String teststr = CONTENTSTR + k;
       assertEquals("Incorrect value for key: (" + rowlabel + "," + CONTENTS_BASIC
           + "), expected: '" + teststr + "' got: '" + bodystr + "'",
           bodystr, teststr);
       collabel = new Text(ANCHORNUM + k);
       bodydata = region.get(rowlabel, collabel);
-      bodystr = new String(bodydata).toString().trim();
+      bodystr = new String(bodydata, HConstants.UTF8_ENCODING).trim();
       teststr = ANCHORSTR + k;
       assertEquals("Incorrect value for key: (" + rowlabel + "," + collabel
           + "), expected: '" + teststr + "' got: '" + bodystr + "'",
@@ -170,7 +172,8 @@
     // Try put with bad lockid.
     boolean exceptionThrown = false;
     try {
-      region.put(-1, CONTENTS_BASIC, "bad input".getBytes());
+      region.put(-1, CONTENTS_BASIC,
+        "bad input".getBytes(HConstants.UTF8_ENCODING));
     } catch (LockException e) {
       exceptionThrown = true;
     }
@@ -183,7 +186,7 @@
       lockid = region.startUpdate(new Text("Some old key"));
       String unregisteredColName = "FamilyGroup:FamilyLabel";
       region.put(lockid, new Text(unregisteredColName),
-        unregisteredColName.getBytes());
+        unregisteredColName.getBytes(HConstants.UTF8_ENCODING));
     } catch (IOException e) {
       exceptionThrown = true;
     } finally {
@@ -276,8 +279,8 @@
       String kLabel = String.format("%1$03d", k);
       long lockid = region.startUpdate(new Text("row_vals1_" + kLabel));
-      region.put(lockid, cols[0], vals1[k].getBytes());
-      region.put(lockid, cols[1], vals1[k].getBytes());
+      region.put(lockid, cols[0], vals1[k].getBytes(HConstants.UTF8_ENCODING));
+      region.put(lockid, cols[1], vals1[k].getBytes(HConstants.UTF8_ENCODING));
       region.commit(lockid, System.currentTimeMillis());
       numInserted += 2;
     }
@@ -300,10 +303,12 @@
       for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
         Text col = it.next();
         byte [] val = curVals.get(col);
-        int curval = Integer.parseInt(new String(val).trim());
+        int curval =
+          Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
         for(int j = 0; j < cols.length; j++) {
           if(col.compareTo(cols[j]) == 0) {
-            assertEquals("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
+            assertEquals("Error at:" + curKey.getRow() + "/"
+                + curKey.getTimestamp()
                 + ", Value for " + col + " should be: " + k
                 + ", but was fetched as: " + curval, k, curval);
             numFetched++;
@@ -345,10 +350,12 @@
       for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
         Text col = it.next();
         byte [] val = curVals.get(col);
-        int curval = Integer.parseInt(new String(val).trim());
+        int curval =
+          Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
        for(int j = 0; j < cols.length; j++) {
          if(col.compareTo(cols[j]) == 0) {
-            assertEquals("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
+            assertEquals("Error at:" + curKey.getRow() + "/"
+                + curKey.getTimestamp()
                 + ", Value for " + col + " should be: " + k
                 + ", but was fetched as: " + curval, k, curval);
             numFetched++;
@@ -375,8 +382,8 @@
       String kLabel = String.format("%1$03d", k);
       long lockid = region.startUpdate(new Text("row_vals1_" + kLabel));
-      region.put(lockid, cols[0], vals1[k].getBytes());
-      region.put(lockid, cols[1], vals1[k].getBytes());
+      region.put(lockid, cols[0], vals1[k].getBytes(HConstants.UTF8_ENCODING));
+      region.put(lockid, cols[1], vals1[k].getBytes(HConstants.UTF8_ENCODING));
       region.commit(lockid, System.currentTimeMillis());
       numInserted += 2;
     }
@@ -398,10 +405,12 @@
       for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
         Text col = it.next();
         byte [] val = curVals.get(col);
-        int curval = Integer.parseInt(new String(val).trim());
+        int curval =
+          Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
         for(int j = 0; j < cols.length; j++) {
           if(col.compareTo(cols[j]) == 0) {
-            assertEquals("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
+            assertEquals("Error at:" + curKey.getRow() + "/"
+                + curKey.getTimestamp()
                 + ", Value for " + col + " should be: " + k
                 + ", but was fetched as: " + curval, k, curval);
             numFetched++;
@@ -443,7 +452,8 @@
       for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
         Text col = it.next();
         byte [] val = curVals.get(col);
-        int curval = Integer.parseInt(new String(val).trim());
+        int curval =
+          Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
         for (int j = 0; j < cols.length; j++) {
           if (col.compareTo(cols[j]) == 0) {
             assertEquals("Value for " + col + " should be: " + k
@@ -480,7 +490,8 @@
       for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
         Text col = it.next();
         byte [] val = curVals.get(col);
-        int curval = Integer.parseInt(new String(val).trim());
+        int curval =
+          Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
         for (int j = 0; j < cols.length; j++) {
           if (col.compareTo(cols[j]) == 0) {
             assertEquals("Value for " + col + " should be: " + k
@@ -529,7 +540,8 @@
 
       // Write to the HRegion
       long writeid = region.startUpdate(new Text("row_" + k));
-      region.put(writeid, CONTENTS_BODY, buf1.toString().getBytes());
+      region.put(writeid, CONTENTS_BODY,
+        buf1.toString().getBytes(HConstants.UTF8_ENCODING));
       region.commit(writeid, System.currentTimeMillis());
       if (k > 0 && k % (N_ROWS / 100) == 0) {
         System.out.println("Flushing write #" + k);
@@ -656,7 +668,7 @@
       for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
         Text col = it.next();
         byte [] val = curVals.get(col);
-        String curval = new String(val).trim();
+        String curval = new String(val, HConstants.UTF8_ENCODING).trim();
         if(col.compareTo(CONTENTS_BASIC) == 0) {
           assertTrue("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
@@ -709,7 +721,8 @@
       for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
         Text col = it.next();
         byte [] val = curVals.get(col);
-        int curval = Integer.parseInt(new String(val).trim());
+        int curval =
+          Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
         for (int j = 0; j < cols.length; j++) {
           if (col.compareTo(cols[j]) == 0) {

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHStoreFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHStoreFile.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHStoreFile.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestHStoreFile.java Fri Aug 31 23:22:01 2007
@@ -79,7 +79,7 @@
     try {
       for (char d = FIRST_CHAR; d <= LAST_CHAR; d++) {
         byte[] b = new byte[] {(byte)d};
-        Text t = new Text(new String(b));
+        Text t = new Text(new String(b, HConstants.UTF8_ENCODING));
         writer.append(new HStoreKey(t, t, System.currentTimeMillis()),
           new ImmutableBytesWritable(t.getBytes()));
       }
@@ -101,7 +101,7 @@
     for (char d = FIRST_CHAR; d <= LAST_CHAR; d++) {
       for (char e = FIRST_CHAR; e <= LAST_CHAR; e++) {
         byte[] b = new byte[] { (byte) d, (byte) e };
-        Text t = new Text(new String(b));
+        Text t = new Text(new String(b, HConstants.UTF8_ENCODING));
         writer.append(new HStoreKey(t, t, System.currentTimeMillis()),
           new ImmutableBytesWritable(t.getBytes()));
       }
@@ -248,7 +248,7 @@
       LOG.info("Last in top: " + key.toString());
       top.getClosest(midkey, value);
       // Assert value is same as key.
-      assertEquals(new String(value.get()),
+      assertEquals(new String(value.get(), HConstants.UTF8_ENCODING),
         ((HStoreKey) midkey).getRow().toString());
 
       // Next test using a midkey that does not exist in the file.

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner2.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner2.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner2.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestScanner2.java Fri Aug 31 23:22:01 2007
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
@@ -58,9 +59,18 @@
   final char LAST_ROWKEY = 'z';
   final char FIRST_COLKEY = '0';
   final char LAST_COLKEY = '3';
-  final byte[] GOOD_BYTES = "goodstuff".getBytes();
-  final byte[] BAD_BYTES = "badstuff".getBytes();
+  static byte[] GOOD_BYTES = null;
+  static byte[] BAD_BYTES = null;
+
+  static {
+    try {
+      GOOD_BYTES = "goodstuff".getBytes(HConstants.UTF8_ENCODING);
+      BAD_BYTES = "badstuff".getBytes(HConstants.UTF8_ENCODING);
+    } catch (UnsupportedEncodingException e) {
+      fail();
+    }
+  }
 
   /**
    * Test the scanner's handling of various filters.
    *
@@ -260,7 +270,8 @@
     HTable t = new HTable(conf, table);
     try {
       long lockid = t.startUpdate(region.getRegionName());
-      t.put(lockid, HConstants.COL_REGIONINFO, Writables.getBytes(region.getRegionInfo()));
+      t.put(lockid, HConstants.COL_REGIONINFO,
+        Writables.getBytes(region.getRegionInfo()));
       t.put(lockid, HConstants.COL_SERVER,
         Writables.stringToBytes(serverAddress.toString()));
       t.put(lockid, HConstants.COL_STARTCODE, Writables.longToBytes(startCode));
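The test classes converted here (TestScanner2 above; TestTableMapReduce, TestRegExpRowFilter, and TestRowFilterSet further below) all move their fixture byte arrays into a static block so the checked exception is handled once per class. A standalone sketch of that pattern (JUnit 3 style, as these tests use; class and fixture names invented):

    import java.io.UnsupportedEncodingException;
    import junit.framework.TestCase;

    public class EncodingFixtureTest extends TestCase {
      // Fixture bytes built once; getBytes(String) throws a checked
      // exception, so a static block does the one-time handling.
      static byte[] GOOD_BYTES = null;

      static {
        try {
          GOOD_BYTES = "goodstuff".getBytes("UTF-8");
        } catch (UnsupportedEncodingException e) {
          // Unreachable: UTF-8 ships with every JVM.  If it ever did run,
          // fail() would surface as an ExceptionInInitializerError.
          fail();
        }
      }

      public void testFixtureIsInitialized() {
        assertNotNull(GOOD_BYTES);
        assertEquals(9, GOOD_BYTES.length);  // "goodstuff" is 9 ASCII bytes
      }
    }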
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestSplit.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestSplit.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestSplit.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestSplit.java Fri Aug 31 23:22:01 2007
@@ -134,7 +134,8 @@
     // of each.
     int interval = (LAST_CHAR - FIRST_CHAR) / 3;
     for (HRegion r: sortedMap.values()) {
-      assertGet(r, COLFAMILY_NAME3, new Text(new String(b)));
+      assertGet(r, COLFAMILY_NAME3,
+        new Text(new String(b, HConstants.UTF8_ENCODING)));
       b[0] += interval;
     }
   }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestTableMapReduce.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestTableMapReduce.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestTableMapReduce.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestTableMapReduce.java Fri Aug 31 23:22:01 2007
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.io.UnsupportedEncodingException;
 import java.util.Map;
 import java.util.TreeMap;
@@ -60,12 +61,20 @@
   private Path dir;
   private MiniHBaseCluster hCluster = null;
 
-  private byte[][] values = {
-    "0123".getBytes(),
-    "abcd".getBytes(),
-    "wxyz".getBytes(),
-    "6789".getBytes()
-  };
+  private static byte[][] values = null;
+
+  static {
+    try {
+      values = new byte[][] {
+        "0123".getBytes(HConstants.UTF8_ENCODING),
+        "abcd".getBytes(HConstants.UTF8_ENCODING),
+        "wxyz".getBytes(HConstants.UTF8_ENCODING),
+        "6789".getBytes(HConstants.UTF8_ENCODING)
+      };
+    } catch (UnsupportedEncodingException e) {
+      fail();
+    }
+  }
 
   /**
    * [EMAIL PROTECTED]
@@ -144,7 +153,8 @@
 
       // Get the original value and reverse it
      String originalValue =
-        new String(((ImmutableBytesWritable)value.get(keys[0])).get());
+        new String(((ImmutableBytesWritable)value.get(keys[0])).get(),
+          HConstants.UTF8_ENCODING);
       StringBuilder newValue = new StringBuilder();
       for(int i = originalValue.length() - 1; i >= 0; i--) {
         newValue.append(originalValue.charAt(i));
@@ -153,8 +163,8 @@
 
       // Now set the value to be collected
       MapWritable outval = new MapWritable();
-      outval.put(TEXT_OUTPUT_COLUMN,
-        new ImmutableBytesWritable(newValue.toString().getBytes()));
+      outval.put(TEXT_OUTPUT_COLUMN, new ImmutableBytesWritable(
+        newValue.toString().getBytes(HConstants.UTF8_ENCODING)));
       output.collect(tKey, outval);
     }
@@ -297,7 +307,7 @@
       for(Map.Entry<Text, byte[]> e: results.entrySet()) {
         LOG.info("    column: " + e.getKey() + " value: "
-          + new String(e.getValue()));
+          + new String(e.getValue(), HConstants.UTF8_ENCODING));
       }
     }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/filter/TestRegExpRowFilter.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/filter/TestRegExpRowFilter.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/filter/TestRegExpRowFilter.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/filter/TestRegExpRowFilter.java Fri Aug 31 23:22:01 2007
@@ -23,12 +23,14 @@
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
+import java.io.UnsupportedEncodingException;
 import java.util.Map;
 import java.util.TreeMap;
 
 import junit.framework.TestCase;
 
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HGlobals;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -39,9 +41,16 @@
   RowFilterInterface mainFilter;
   final char FIRST_CHAR = 'a';
   final char LAST_CHAR = 'e';
-  byte [] GOOD_BYTES = "abc".getBytes();
   final String HOST_PREFIX = "org.apache.site-";
-
+  static byte [] GOOD_BYTES = null;
+
+  static {
+    try {
+      GOOD_BYTES = "abc".getBytes(HConstants.UTF8_ENCODING);
+    } catch (UnsupportedEncodingException e) {
+      fail();
+    }
+  }
 
   /** [EMAIL PROTECTED] */
   @Override
   protected void setUp() throws Exception {
@@ -112,7 +121,9 @@
       yahooSite, filter.filter(new Text(yahooSite)));
   }
 
-  private void regexRowColumnTests(RowFilterInterface filter) {
+  private void regexRowColumnTests(RowFilterInterface filter)
+    throws UnsupportedEncodingException {
+
     for (char c = FIRST_CHAR; c <= LAST_CHAR; c++) {
       Text t = createRow(c);
       for (Map.Entry<Text, byte []> e: this.colvalues.entrySet()) {
@@ -129,7 +140,7 @@
 
         // Do same but with bad bytes.
         assertTrue("Failed with character " + c,
-          filter.filter(r, col, "badbytes".getBytes()));
+          filter.filter(r, col, "badbytes".getBytes(HConstants.UTF8_ENCODING)));
 
         // Do with good bytes but bad column name. Should not filter out.
         assertFalse("Failed with character " + c,
@@ -175,7 +186,7 @@
     // that maps to a null value.
     // Testing row with columnKeys: a-e, e maps to null
     colvalues.put(new Text(new String(new char[] { LAST_CHAR })),
-      HConstants.DELETE_BYTES.get());
+      HGlobals.deleteBytes.get());
     assertFalse("Failed with last columnKey " + LAST_CHAR + " mapping to null.",
       filter.filterNotNull(colvalues));
   }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/filter/TestRowFilterSet.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/filter/TestRowFilterSet.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/filter/TestRowFilterSet.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/filter/TestRowFilterSet.java Fri Aug 31 23:22:01 2007
@@ -23,10 +23,12 @@
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
+import java.io.UnsupportedEncodingException;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.TreeMap;
 
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.io.Text;
 
@@ -42,10 +44,19 @@
   static final int MAX_PAGES = 5;
   final char FIRST_CHAR = 'a';
   final char LAST_CHAR = 'e';
-  final byte[] GOOD_BYTES = "abc".getBytes();
-  final byte[] BAD_BYTES = "def".getBytes();
   TreeMap<Text, byte[]> colvalues;
+  static byte[] GOOD_BYTES = null;
+  static byte[] BAD_BYTES = null;
+
+  static {
+    try {
+      GOOD_BYTES = "abc".getBytes(HConstants.UTF8_ENCODING);
+      BAD_BYTES = "def".getBytes(HConstants.UTF8_ENCODING);
+    } catch (UnsupportedEncodingException e) {
+      fail();
+    }
+  }
 
   /** [EMAIL PROTECTED] */
   @Override
   protected void setUp() throws Exception {

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/onelab/test/StringKey.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/onelab/test/StringKey.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/onelab/test/StringKey.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/onelab/test/StringKey.java Fri Aug 31 23:22:01 2007
@@ -48,6 +48,8 @@
  */
 package org.onelab.test;
 
+import java.io.UnsupportedEncodingException;
+import org.apache.hadoop.hbase.HConstants;
 import org.onelab.filter.Key;
 
 /**
@@ -70,9 +72,10 @@
    * Construct a Key using the specified String and default weight
    *
    * @param key String key value
+   * @throws UnsupportedEncodingException
    */
-  public StringKey(String key){
-    super(key.getBytes());
+  public StringKey(String key) throws UnsupportedEncodingException {
+    super(key.getBytes(HConstants.UTF8_ENCODING));
   }
 
   /**
@@ -80,9 +83,12 @@
    *
    * @param key - String key value
    * @param weight key weight
+   * @throws UnsupportedEncodingException
   */
-  public StringKey(String key, double weight){
-    super(key.getBytes(), weight);
+  public StringKey(String key, double weight)
+    throws UnsupportedEncodingException {
+
+    super(key.getBytes(HConstants.UTF8_ENCODING), weight);
   }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/onelab/test/TestFilter.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/onelab/test/TestFilter.java?rev=571711&r1=571710&r2=571711&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/onelab/test/TestFilter.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/onelab/test/TestFilter.java Fri Aug 31 23:22:01 2007
@@ -48,6 +48,7 @@
  */
 package org.onelab.test;
 
+import java.io.UnsupportedEncodingException;
 import junit.framework.TestCase;
 import org.onelab.filter.*;
 
@@ -61,8 +62,10 @@
  */
 public class TestFilter extends TestCase {
 
-  /** Test a BloomFilter */
-  public void testBloomFilter() {
+  /** Test a BloomFilter
+   * @throws UnsupportedEncodingException
+   */
+  public void testBloomFilter() throws UnsupportedEncodingException {
     Filter bf = new BloomFilter(8, 2);
     Key key = new StringKey("toto");
     Key k2 = new StringKey("lulu");
@@ -76,8 +79,10 @@
     assertTrue(bf.membershipTest(new StringKey("abcd"))); // False positive
   }
 
-  /** Test a CountingBloomFilter */
-  public void testCountingBloomFilter() {
+  /** Test a CountingBloomFilter
+   * @throws UnsupportedEncodingException
+   */
+  public void testCountingBloomFilter() throws UnsupportedEncodingException {
     Filter bf = new CountingBloomFilter(8, 2);
     Key key = new StringKey("toto");
     Key k2 = new StringKey("lulu");
@@ -91,8 +96,10 @@
     assertTrue(bf.membershipTest(new StringKey("abcd"))); // False positive
   }
 
-  /** Test a DynamicBloomFilter */
-  public void testDynamicBloomFilter() {
+  /** Test a DynamicBloomFilter
+   * @throws UnsupportedEncodingException
+   */
+  public void testDynamicBloomFilter() throws UnsupportedEncodingException {
     Filter bf = new DynamicBloomFilter(8, 2, 2);
     Key key = new StringKey("toto");
     Key k2 = new StringKey("lulu");