Author: jimk
Date: Wed May 23 08:14:00 2007
New Revision: 540973

URL: http://svn.apache.org/viewvc?view=rev&rev=540973
Log:
HADOOP-1420, HADOOP-1423. Findbugs changes; remove a reference to the removed
class HLocking that was breaking the main build.
Modified:
    lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HAbstractScanner.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HClient.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLogKey.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerAddress.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreFile.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreKey.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java

Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt Wed May 23 08:14:00 2007
@@ -12,3 +12,5 @@
     tolerant.
  6. HADOOP-1418. HBase miscellaneous: unit test for HClient, client to do
     'Performance Evaluation', etc.
+ 7. HADOOP-1420, HADOOP-1423. Findbugs changes, remove reference to removed
+    class HLocking.

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HAbstractScanner.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HAbstractScanner.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HAbstractScanner.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HAbstractScanner.java Wed May 23 08:14:00 2007
@@ -50,7 +50,7 @@
   // 2. Match on the column family + column key regex
   // 3. Simple match: compare column family + column key literally
-  private class ColumnMatcher {
+  private static class ColumnMatcher {
     private boolean wildCardmatch;
     private MATCH_TYPE matchType;
     private String family;
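The ColumnMatcher change above (and the TableInfo change below) follows the Findbugs "inner class could be static" pattern: a nested class that never touches its enclosing instance should be declared static, so each instance stops carrying a hidden reference to the outer object. A minimal sketch of the difference, with hypothetical class names not taken from this commit:

    public class Outer {
      // Non-static inner class: every Holder1 instance keeps a hidden
      // Outer.this reference, even though it never uses it.
      private class Holder1 {
        final String value;
        Holder1(String value) { this.value = value; }
      }

      // Static nested class: no hidden reference, can be created without
      // an Outer instance, and does not pin the outer object for the GC.
      private static class Holder2 {
        final String value;
        Holder2(String value) { this.value = value; }
      }
    }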
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HClient.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HClient.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HClient.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HClient.java Wed May 23 08:14:00 2007
@@ -18,7 +18,6 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Iterator;
 import java.util.NoSuchElementException;
 import java.util.Random;
 import java.util.TreeMap;
@@ -50,7 +49,7 @@
   private HMasterInterface master;
   private final Configuration conf;
 
-  private class TableInfo {
+  private static class TableInfo {
     public HRegionInfo regionInfo;
     public HServerAddress serverAddress;
@@ -133,7 +132,6 @@
       try {
         Thread.sleep(this.clientTimeout);
-
       } catch(InterruptedException e) {
       }
@@ -179,9 +177,9 @@
     TreeMap<Text, TableInfo> metaServers =
       this.tablesToServers.get(META_TABLE_NAME);
-    if(metaServers == null) {                 // Don't know where the meta is
+    if (metaServers == null) {                // Don't know where the meta is
       loadMetaFromRoot(tableName);
-      if(tableName.equals(META_TABLE_NAME) || tableName.equals(ROOT_TABLE_NAME)) {
+      if (tableName.equals(META_TABLE_NAME) || tableName.equals(ROOT_TABLE_NAME)) {
         // All we really wanted was the meta or root table
         return;
       }
@@ -192,32 +190,19 @@
     for(int tries = 0;
         this.tableServers.size() == 0 && tries < this.numRetries;
         tries++) {
-
-      Text firstMetaRegion = null;
-      if(metaServers.containsKey(tableName)) {
-        firstMetaRegion = tableName;
-
-      } else {
-        firstMetaRegion = metaServers.headMap(tableName).lastKey();
-      }
-      for(Iterator<TableInfo> i
-          = metaServers.tailMap(firstMetaRegion).values().iterator();
-          i.hasNext(); ) {
-
-        TableInfo t = i.next();
-
+      Text firstMetaRegion = (metaServers.containsKey(tableName))?
+        tableName: metaServers.headMap(tableName).lastKey();
+      for(TableInfo t: metaServers.tailMap(firstMetaRegion).values()) {
         scanOneMetaRegion(t, tableName);
       }
-      if(this.tableServers.size() == 0) {
+      if (this.tableServers.size() == 0) {
         // Table not assigned. Sleep and try again
-
-        if(LOG.isDebugEnabled()) {
+        if (LOG.isDebugEnabled()) {
          LOG.debug("Sleeping. Table " + tableName
           + " not currently being served.");
        }
        try {
          Thread.sleep(this.clientTimeout);
-
        } catch(InterruptedException e) {
        }
        if(LOG.isDebugEnabled()) {
@@ -225,7 +210,7 @@
        }
      }
    }
-    if(this.tableServers.size() == 0) {
+    if (this.tableServers.size() == 0) {
      throw new IOException("failed to scan " + META_TABLE_NAME + " after "
        + this.numRetries + " retries");
    }
@@ -976,7 +961,9 @@
           printUsage();
           break;
         }
-      } catch (Exception e) {
+      } catch (IOException e) {
+        e.printStackTrace();
+      } catch (RuntimeException e) {
         e.printStackTrace();
       }
@@ -988,4 +975,4 @@
     int errCode = (new HClient(c)).doCommandLine(args);
     System.exit(errCode);
   }
-}
\ No newline at end of file
+}

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLog.java Wed May 23 08:14:00 2007
@@ -72,14 +72,13 @@
   boolean insideCacheFlush = false;
 
   TreeMap<Text, Long> regionToLastFlush = new TreeMap<Text, Long>();
-  long oldestOutstandingSeqNum = -1;
 
   boolean closed = false;
   transient long logSeqNum = 0;
   long filenum = 0;
   transient int numEntries = 0;
 
-  Integer rollLock = new Integer(0);
+  Integer rollLock = 0;
 
   /**
    * Bundle up a bunch of log files (which are no longer being written to),
@@ -319,10 +318,10 @@
     }
 
     int counter = 0;
-    for (Text column: columns.keySet()) {
+    for (Map.Entry<Text, BytesWritable> es: columns.entrySet()) {
       HLogKey logKey =
         new HLogKey(regionName, tableName, row, seqNum[counter++]);
-      HLogEdit logEdit = new HLogEdit(column, columns.get(column), timestamp);
+      HLogEdit logEdit = new HLogEdit(es.getKey(), es.getValue(), timestamp);
       writer.append(logKey, logEdit);
       numEntries++;
     }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLogKey.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLogKey.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLogKey.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HLogKey.java Wed May 23 08:14:00 2007
@@ -72,6 +72,19 @@
     return getTablename().toString() + " " + getRegionName().toString() + " " +
       getRow().toString() + " " + getLogSeqNum();
   }
+
+  @Override
+  public boolean equals(Object obj) {
+    return compareTo(obj) == 0;
+  }
+
+  @Override
+  public int hashCode() {
+    int result = this.regionName.hashCode();
+    result ^= this.row.hashCode();
+    result ^= Long.valueOf(this.logSeqNum).hashCode();
+    return result;
+  }
 
 //////////////////////////////////////////////////////////////////////////////
 // Comparable
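The HLogKey hunk above adds equals() and hashCode() to a class that already defines compareTo(); Findbugs flags comparable types used as map keys when equals() and hashCode() disagree with the ordering, since hash-based and tree-based collections would then behave differently. A sketch of the contract in the same style, using a hypothetical Key class rather than the actual HLogKey fields:

    // Hypothetical key class: equals delegates to compareTo, and hashCode
    // mixes the same fields compareTo inspects, so equal keys hash alike.
    class Key implements Comparable<Key> {
      final String region;
      final long seqNum;

      Key(String region, long seqNum) {
        this.region = region;
        this.seqNum = seqNum;
      }

      public int compareTo(Key other) {
        int result = this.region.compareTo(other.region);
        return (result != 0) ? result : Long.compare(this.seqNum, other.seqNum);
      }

      @Override
      public boolean equals(Object obj) {
        return obj instanceof Key && compareTo((Key) obj) == 0;
      }

      @Override
      public int hashCode() {
        return region.hashCode() ^ Long.valueOf(seqNum).hashCode();
      }
    }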
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java Wed May 23 08:14:00 2007
@@ -294,9 +294,9 @@
   private RootScanner rootScanner;
   private Thread rootScannerThread;
-  private Integer rootScannerLock = new Integer(0);
+  private Integer rootScannerLock = 0;
 
-  private class MetaRegion {
+  private static class MetaRegion {
     public HServerAddress server;
     public Text regionName;
     public Text startKey;
@@ -426,7 +426,7 @@
   private MetaScanner metaScanner;
   private Thread metaScannerThread;
-  private Integer metaScannerLock = new Integer(0);
+  private Integer metaScannerLock = 0;
 
   // The 'unassignedRegions' table maps from a region name to a HRegionInfo record,
   // which includes the region's table, its id, and its start/end keys.
@@ -1136,7 +1136,6 @@
         }
       }
-      scannerId = -1L;
     }
 
     // Remove server from root/meta entries
@@ -1739,7 +1738,7 @@
     private String server;
 
     public ServerExpirer(String server) {
-      this.server = new String(server);
+      this.server = server;
     }
 
     public void leaseExpired() {
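The rootScannerLock and metaScannerLock fields above exist only to be synchronized on, and the change swaps `new Integer(0)` (which Findbugs flags as a needless boxing constructor) for the autoboxed literal 0. One caveat worth knowing about this idiom, not raised in the commit itself: autoboxing small values resolves to Integer.valueOf(), which returns a JVM-wide cached instance, so unrelated code that locks on the same boxed value shares the monitor. A dedicated Object is the conventional alternative; a small sketch with hypothetical names:

    class Roller {
      // Autoboxed 0 is Integer.valueOf(0), a cached instance shared across
      // the whole JVM; any other code locking on Integer.valueOf(0) would
      // contend on the same monitor.
      private final Integer sharedLock = 0;

      // A private Object is the usual dedicated monitor.
      private final Object privateLock = new Object();

      void roll() {
        synchronized (privateLock) {
          // ... critical section ...
        }
      }
    }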
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java Wed May 23 08:14:00 2007
@@ -22,6 +22,7 @@
 
 import java.io.*;
 import java.util.*;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 /*******************************************************************************
  * The HMemcache holds in-memory modifications to the HRegion. This is really a
@@ -38,9 +39,10 @@
 
   TreeMap<HStoreKey, BytesWritable> snapshot = null;
 
-  HLocking lock = new HLocking();
+  ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
 
   public HMemcache() {
+    super();
   }
 
   public static class Snapshot {
@@ -48,6 +50,7 @@
     public long sequenceId = 0;
 
     public Snapshot() {
+      super();
     }
   }
@@ -67,7 +70,7 @@
   public Snapshot snapshotMemcacheForLog(HLog log) throws IOException {
     Snapshot retval = new Snapshot();
 
-    this.lock.obtainWriteLock();
+    this.lock.writeLock().lock();
     try {
       if(snapshot != null) {
         throw new IOException("Snapshot in progress!");
@@ -96,7 +99,7 @@
       return retval;
 
     } finally {
-      this.lock.releaseWriteLock();
+      this.lock.writeLock().unlock();
     }
   }
@@ -106,7 +109,7 @@
    * Modifying the structure means we need to obtain a writelock.
    */
   public void deleteSnapshot() throws IOException {
-    this.lock.obtainWriteLock();
+    this.lock.writeLock().lock();
 
     try {
       if(snapshot == null) {
@@ -132,7 +135,7 @@
       }
 
     } finally {
-      this.lock.releaseWriteLock();
+      this.lock.writeLock().unlock();
     }
   }
@@ -142,18 +145,14 @@
    * Operation uses a write lock.
    */
   public void add(Text row, TreeMap<Text, BytesWritable> columns, long timestamp) {
-    this.lock.obtainWriteLock();
+    this.lock.writeLock().lock();
     try {
-      for(Iterator<Text> it = columns.keySet().iterator(); it.hasNext(); ) {
-        Text column = it.next();
-        BytesWritable val = columns.get(column);
-
-        HStoreKey key = new HStoreKey(row, column, timestamp);
-        memcache.put(key, val);
+      for (Map.Entry<Text, BytesWritable> es: columns.entrySet()) {
+        HStoreKey key = new HStoreKey(row, es.getKey(), timestamp);
+        memcache.put(key, es.getValue());
       }
-
     } finally {
-      this.lock.releaseWriteLock();
+      this.lock.writeLock().unlock();
     }
   }
@@ -164,7 +163,7 @@
    */
   public BytesWritable[] get(HStoreKey key, int numVersions) {
     Vector<BytesWritable> results = new Vector<BytesWritable>();
-    this.lock.obtainReadLock();
+    this.lock.readLock().lock();
     try {
       Vector<BytesWritable> result = get(memcache, key, numVersions-results.size());
       results.addAll(0, result);
@@ -178,15 +177,10 @@
         results.addAll(results.size(), result);
       }
 
-      if(results.size() == 0) {
-        return null;
-
-      } else {
-        return results.toArray(new BytesWritable[results.size()]);
-      }
-
+      return (results.size() == 0)?
+        null: results.toArray(new BytesWritable[results.size()]);
     } finally {
-      this.lock.releaseReadLock();
+      this.lock.readLock().unlock();
     }
   }
@@ -196,9 +190,9 @@
    *
    * The returned object should map column names to byte arrays (byte[]).
    */
-  public TreeMap<Text, BytesWritable> getFull(HStoreKey key) throws IOException {
+  public TreeMap<Text, BytesWritable> getFull(HStoreKey key) {
     TreeMap<Text, BytesWritable> results = new TreeMap<Text, BytesWritable>();
-    this.lock.obtainReadLock();
+    this.lock.readLock().lock();
     try {
       internalGetFull(memcache, key, results);
       for(int i = history.size()-1; i >= 0; i--) {
@@ -208,25 +202,22 @@
       return results;
 
     } finally {
-      this.lock.releaseReadLock();
+      this.lock.readLock().unlock();
     }
   }
 
   void internalGetFull(TreeMap<HStoreKey, BytesWritable> map, HStoreKey key,
       TreeMap<Text, BytesWritable> results) {
-
     SortedMap<HStoreKey, BytesWritable> tailMap = map.tailMap(key);
-
-    for(Iterator<HStoreKey> it = tailMap.keySet().iterator(); it.hasNext(); ) {
-      HStoreKey itKey = it.next();
+    for (Map.Entry<HStoreKey, BytesWritable> es: tailMap.entrySet()) {
+      HStoreKey itKey = es.getKey();
       Text itCol = itKey.getColumn();
-
-      if(results.get(itCol) == null
+      if (results.get(itCol) == null
           && key.matchesWithoutColumn(itKey)) {
         BytesWritable val = tailMap.get(itKey);
         results.put(itCol, val);
-      } else if(key.getRow().compareTo(itKey.getRow()) > 0) {
+      } else if (key.getRow().compareTo(itKey.getRow()) > 0) {
         break;
       }
     }
@@ -246,15 +237,14 @@
     HStoreKey curKey = new HStoreKey(key.getRow(), key.getColumn(), key.getTimestamp());
     SortedMap<HStoreKey, BytesWritable> tailMap = map.tailMap(curKey);
-    for(Iterator<HStoreKey> it = tailMap.keySet().iterator(); it.hasNext(); ) {
-      HStoreKey itKey = it.next();
-
-      if(itKey.matchesRowCol(curKey)) {
+    for (Map.Entry<HStoreKey, BytesWritable> es: tailMap.entrySet()) {
+      HStoreKey itKey = es.getKey();
+      if (itKey.matchesRowCol(curKey)) {
         result.add(tailMap.get(itKey));
         curKey.setVersion(itKey.getTimestamp() - 1);
       }
-      if(numVersions > 0 && result.size() >= numVersions) {
+      if (numVersions > 0 && result.size() >= numVersions) {
         break;
       }
     }
@@ -285,7 +275,7 @@
 
       super(timestamp, targetCols);
 
-      lock.obtainReadLock();
+      lock.readLock().lock();
       try {
         this.backingMaps = new TreeMap[history.size() + 1];
@@ -377,7 +367,7 @@
         }
 
       } finally {
-        lock.releaseReadLock();
+        lock.readLock().unlock();
         scannerClosed = true;
       }
     }
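This is the change the log message refers to: HMemcache drops the removed HLocking class in favor of java.util.concurrent.locks.ReentrantReadWriteLock, with every lock()/unlock() pair wrapped in try/finally so the lock is released even when the guarded code throws. A minimal, self-contained sketch of the idiom, assuming a hypothetical Cache class rather than the real HMemcache:

    import java.util.TreeMap;
    import java.util.concurrent.locks.ReentrantReadWriteLock;

    // Sketch of the read/write-lock idiom HMemcache adopts above.
    class Cache {
      private final TreeMap<String, byte[]> map = new TreeMap<String, byte[]>();
      private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();

      byte[] get(String key) {
        lock.readLock().lock();        // many readers may hold this at once
        try {
          return map.get(key);
        } finally {
          lock.readLock().unlock();    // always release, even on exception
        }
      }

      void put(String key, byte[] value) {
        lock.writeLock().lock();       // exclusive: blocks readers and writers
        try {
          map.put(key, value);
        } finally {
          lock.writeLock().unlock();
        }
      }
    }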
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java Wed May 23 08:14:00 2007
@@ -171,12 +171,11 @@
       LOG.debug("merging stores");
     }
 
-    for(Iterator<Text> it = filesToMerge.keySet().iterator(); it.hasNext(); ) {
-      Text colFamily = it.next();
-      Vector<HStoreFile> srcFiles = filesToMerge.get(colFamily);
+    for (Map.Entry<Text, Vector<HStoreFile>> es: filesToMerge.entrySet()) {
+      Text colFamily = es.getKey();
+      Vector<HStoreFile> srcFiles = es.getValue();
       HStoreFile dst = new HStoreFile(conf, merges, newRegionInfo.regionName,
         colFamily, Math.abs(rand.nextLong()));
-
       dst.mergeStoreFiles(srcFiles, fs, conf);
       alreadyMerged.addAll(srcFiles);
     }
@@ -226,12 +225,11 @@
       LOG.debug("merging changes since start of merge");
     }
 
-    for(Iterator<Text> it = filesToMerge.keySet().iterator(); it.hasNext(); ) {
-      Text colFamily = it.next();
-      Vector<HStoreFile> srcFiles = filesToMerge.get(colFamily);
-      HStoreFile dst = new HStoreFile(conf, merges, newRegionInfo.regionName,
-        colFamily, Math.abs(rand.nextLong()));
-
+    for (Map.Entry<Text, Vector<HStoreFile>> es : filesToMerge.entrySet()) {
+      Text colFamily = es.getKey();
+      Vector<HStoreFile> srcFiles = es.getValue();
+      HStoreFile dst = new HStoreFile(conf, merges,
+        newRegionInfo.regionName, colFamily, Math.abs(rand.nextLong()));
       dst.mergeStoreFiles(srcFiles, fs, conf);
     }
@@ -268,7 +266,7 @@
   HRegionInfo regionInfo;
   Path regiondir;
 
-  class WriteState {
+  static class WriteState {
     public volatile boolean writesOngoing;
     public volatile boolean writesEnabled;
     public volatile boolean closed;
@@ -1248,7 +1246,7 @@
 /*******************************************************************************
  * HScanner is an iterator through a bunch of rows in an HRegion.
  ******************************************************************************/
-  private class HScanner implements HInternalScannerInterface {
+  private static class HScanner implements HInternalScannerInterface {
     private HInternalScannerInterface[] scanners;
     private TreeMap<Text, BytesWritable>[] resultSets;
     private HStoreKey[] keys;
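The merge loops above show the other recurring pattern in this commit: iterating a map's entrySet() instead of walking keySet() and calling get() once per key, which Findbugs reports as an inefficient map iterator. Each entry already carries both key and value, so the extra tree lookup disappears. A small runnable illustration with hypothetical data:

    import java.util.Map;
    import java.util.TreeMap;

    class EntrySetDemo {
      public static void main(String[] args) {
        TreeMap<String, Integer> counts = new TreeMap<String, Integer>();
        counts.put("a", 1);
        counts.put("b", 2);

        // Before: one extra map lookup per iteration via get().
        for (String key : counts.keySet()) {
          System.out.println(key + "=" + counts.get(key));
        }

        // After: key and value come from the same entry, no extra lookup.
        for (Map.Entry<String, Integer> e : counts.entrySet()) {
          System.out.println(e.getKey() + "=" + e.getValue());
        }
      }
    }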
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java Wed May 23 08:14:00 2007
@@ -64,7 +64,7 @@
   private long splitOrCompactCheckFrequency;
   private SplitOrCompactChecker splitOrCompactChecker;
   private Thread splitOrCompactCheckerThread;
-  private Integer splitOrCompactLock = new Integer(0);
+  private Integer splitOrCompactLock = 0;
 
   private class SplitOrCompactChecker implements Runnable, RegionUnavailableListener {
     private HClient client = new HClient(conf);
@@ -222,7 +222,7 @@
   private Flusher cacheFlusher;
   private Thread cacheFlusherThread;
-  private Integer cacheFlusherLock = new Integer(0);
+  private Integer cacheFlusherLock = 0;
 
   private class Flusher implements Runnable {
     public void run() {
       while(! stopRequested) {
@@ -291,7 +291,7 @@
   private HLog log;
   private LogRoller logRoller;
   private Thread logRollerThread;
-  private Integer logRollerLock = new Integer(0);
+  private Integer logRollerLock = 0;
 
   private class LogRoller implements Runnable {
     public void run() {
       while(! stopRequested) {
@@ -388,7 +388,7 @@
     try {
       // Server to handle client requests
-      this.server = RPC.getServer(this, address.getBindAddress().toString(),
+      this.server = RPC.getServer(this, address.getBindAddress(),
         address.getPort(), conf.getInt("hbase.regionserver.handler.count", 10),
         false, conf);
@@ -509,10 +509,11 @@
       if (LOG.isDebugEnabled()) {
         LOG.debug("Sleep");
       }
-      synchronized(this) {
+      synchronized (this) {
         try {
-          Thread.sleep(waitTime);
-        } catch(InterruptedException iex) {
+          wait(waitTime);
+        } catch (InterruptedException e1) {
+          // Go back up to the while test if stop has been requested.
         }
       }
     }
@@ -588,10 +589,11 @@
       if (LOG.isDebugEnabled()) {
         LOG.debug("Sleep");
       }
-      synchronized(this) {
+      synchronized (this) {
        try {
-          Thread.sleep(waitTime);
+          wait(waitTime);
        } catch(InterruptedException iex) {
+          // On interrupt we go around to the while test of stopRequested
        }
      }
      if (LOG.isDebugEnabled()) {
@@ -927,10 +929,8 @@
     TreeMap<Text, BytesWritable> map = region.getFull(row);
     LabelledData result[] = new LabelledData[map.size()];
     int counter = 0;
-    for(Iterator<Text> it = map.keySet().iterator(); it.hasNext(); ) {
-      Text colname = it.next();
-      BytesWritable val = map.get(colname);
-      result[counter++] = new LabelledData(colname, val);
+    for (Map.Entry<Text, BytesWritable> es: map.entrySet()) {
+      result[counter++] = new LabelledData(es.getKey(), es.getValue());
     }
     return result;
   }
@@ -939,7 +939,7 @@
    * Start an update to the HBase. This also creates a lease associated with
    * the caller.
    */
-  private class RegionListener extends LeaseListener {
+  private static class RegionListener extends LeaseListener {
     private HRegion localRegion;
     private long localLockId;
@@ -1180,4 +1180,4 @@
       printUsageAndExit();
     }
   }
-}
\ No newline at end of file
+}

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java Wed May 23 08:14:00 2007
@@ -19,6 +19,7 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 import java.util.TreeMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -181,12 +182,14 @@
     // Every line starts with row name followed by column name
     // followed by cell content.
     while(scanner.next(key, results)) {
-      for (Text colname: results.keySet()) {
+      for (Map.Entry<Text, BytesWritable> es: results.entrySet()) {
+        Text colname = es.getKey();
+        BytesWritable colvalue = es.getValue();
         Object value = null;
-        byte[] bytes = new byte[results.get(colname).getSize()];
+        byte[] bytes = new byte[colvalue.getSize()];
         if (colname.toString().equals("info:regioninfo")) {
           // Then bytes are instance of an HRegionInfo.
-          System.arraycopy(results.get(colname).get(), 0, bytes, 0, bytes.length);
+          System.arraycopy(colvalue, 0, bytes, 0, bytes.length);
           value = new HRegionInfo(bytes);
         } else {
           value = new String(bytes, HConstants.UTF8_ENCODING);

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerAddress.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerAddress.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerAddress.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerAddress.java Wed May 23 08:14:00 2007
@@ -35,8 +35,8 @@
 
   public HServerAddress(InetSocketAddress address) {
     this.address = address;
-    this.stringValue = new String(address.getAddress().getHostAddress()
-      + ":" + address.getPort());
+    this.stringValue = address.getAddress().getHostAddress() + ":" +
+      address.getPort();
   }
 
   public HServerAddress(String hostAndPort) {
@@ -47,19 +47,19 @@
     String host = hostAndPort.substring(0, colonIndex);
     int port = Integer.valueOf(hostAndPort.substring(colonIndex + 1));
     this.address = new InetSocketAddress(host, port);
-    this.stringValue = new String(hostAndPort);
+    this.stringValue = hostAndPort;
   }
 
   public HServerAddress(String bindAddress, int port) {
     this.address = new InetSocketAddress(bindAddress, port);
-    this.stringValue = new String(bindAddress + ":" + port);
+    this.stringValue = bindAddress + ":" + port;
   }
 
   public HServerAddress(HServerAddress other) {
     String bindAddress = other.getBindAddress();
     int port = other.getPort();
     address = new InetSocketAddress(bindAddress, port);
-    stringValue = new String(bindAddress + ":" + port);
+    stringValue = bindAddress + ":" + port;
  }
 
   public String getBindAddress() {
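The HRegionServer hunks at lines 509 and 588 swap Thread.sleep(waitTime) for wait(waitTime) inside the synchronized(this) block. sleep() keeps the monitor for the whole pause, while wait() releases it and can be cut short by a notify(), so a shutdown request does not have to sit out the full sleep. A sketch of the pattern, with hypothetical field and method names not taken from HRegionServer:

    // Sketch of the sleep-vs-wait distinction exploited above.
    class Worker implements Runnable {
      private volatile boolean stopRequested = false;

      public void run() {
        while (!stopRequested) {
          // ... do one unit of work ...
          synchronized (this) {
            try {
              // Releases the monitor while paused; a notifyAll() from
              // requestStop() wakes us early so shutdown is prompt.
              wait(10 * 1000);
            } catch (InterruptedException e) {
              // Fall through and re-test stopRequested.
            }
          }
        }
      }

      void requestStop() {
        stopRequested = true;
        synchronized (this) {
          notifyAll();  // wake the worker out of its timed wait
        }
      }
    }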
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStore.java Wed May 23 08:14:00 2007
@@ -61,8 +61,8 @@
   Path compactdir;
   Path loginfodir;
 
-  Integer compactLock = new Integer(0);
-  Integer flushLock = new Integer(0);
+  Integer compactLock = 0;
+  Integer flushLock = 0;
 
   private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
@@ -225,13 +225,10 @@
     if(LOG.isDebugEnabled()) {
       LOG.debug("starting map readers");
     }
-    for(Iterator<Long> it = mapFiles.keySet().iterator(); it.hasNext(); ) {
-      Long key = it.next().longValue();
-      HStoreFile hsf = mapFiles.get(key);
-
+    for(Map.Entry<Long, HStoreFile> e: mapFiles.entrySet()) {
       // TODO - is this really necessary? Don't I do this inside compact()?
-      maps.put(key,
-        new MapFile.Reader(fs, hsf.getMapFilePath().toString(), conf));
+      maps.put(e.getKey(),
+        new MapFile.Reader(fs, e.getValue().getMapFilePath().toString(), conf));
     }
 
     LOG.info("HStore online for " + this.regionName + "/" + this.colFamily);
@@ -239,19 +236,16 @@
 
   /** Turn off all the MapFile readers */
   public void close() throws IOException {
-    this.lock.writeLock().lock();
     LOG.info("closing HStore for " + this.regionName + "/" + this.colFamily);
-
+    this.lock.writeLock().lock();
     try {
-      for(Iterator<MapFile.Reader> it = maps.values().iterator(); it.hasNext(); ) {
-        MapFile.Reader map = it.next();
+      for (MapFile.Reader map: maps.values()) {
         map.close();
       }
       maps.clear();
       mapFiles.clear();
 
       LOG.info("HStore closed for " + this.regionName + "/" + this.colFamily);
-
     } finally {
       this.lock.writeLock().unlock();
     }
@@ -300,10 +294,10 @@
         HStoreKey.class, BytesWritable.class);
 
     try {
-      for (HStoreKey curkey: inputCache.keySet()) {
-        if(this.colFamily.equals(HStoreKey.extractFamily(curkey.getColumn()))) {
-          BytesWritable val = inputCache.get(curkey);
-          out.append(curkey, val);
+      for (Map.Entry<HStoreKey, BytesWritable> es: inputCache.entrySet()) {
+        HStoreKey curkey = es.getKey();
+        if (this.colFamily.equals(HStoreKey.extractFamily(curkey.getColumn()))) {
+          out.append(curkey, es.getValue());
        }
      }
      if(LOG.isDebugEnabled()) {
@@ -631,8 +625,9 @@
     // 1. Acquiring the write-lock
 
-    this.lock.writeLock().lock();
     Path curCompactStore =
       HStoreFile.getHStoreDir(compactdir, regionName, colFamily);
+    this.lock.writeLock().lock();
     try {
       Path doneFile = new Path(curCompactStore, COMPACTION_DONE);
       if(! fs.exists(doneFile)) {
@@ -918,10 +913,10 @@
 
   /** Generate a random unique filename suffix */
   String obtainFileLabel(Path prefix) throws IOException {
-    String testsuffix = String.valueOf(Math.abs(rand.nextInt()));
+    String testsuffix = String.valueOf(rand.nextInt(Integer.MAX_VALUE));
     Path testpath = new Path(prefix.toString() + testsuffix);
     while(fs.exists(testpath)) {
-      testsuffix = String.valueOf(Math.abs(rand.nextInt()));
+      testsuffix = String.valueOf(rand.nextInt(Integer.MAX_VALUE));
       testpath = new Path(prefix.toString() + testsuffix);
     }
     return testsuffix;

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreFile.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreFile.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreFile.java Wed May 23 08:14:00 2007
@@ -326,6 +326,19 @@
       out.close();
     }
   }
+
+  public boolean equals(Object o) {
+    return this.compareTo(o) == 0;
+  }
+
+  @Override
+  public int hashCode() {
+    int result = this.dir.hashCode();
+    result ^= this.regionName.hashCode();
+    result ^= this.colFamily.hashCode();
+    result ^= Long.valueOf(this.fileId).hashCode();
+    return result;
+  }
 
 //////////////////////////////////////////////////////////////////////////////
 // Writable
@@ -368,11 +381,4 @@
     }
     return result;
   }
-
-
-  public boolean equals(Object o) {
-    return this.compareTo(o) == 0;
-  }
-}
-
-
+}
\ No newline at end of file

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreKey.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreKey.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreKey.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HStoreKey.java Wed May 23 08:14:00 2007
@@ -15,6 +15,8 @@
  */
 package org.apache.hadoop.hbase;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.*;
 
 import java.io.*;
@@ -23,6 +25,8 @@
  * A Key for a stored row
  ******************************************************************************/
 public class HStoreKey implements WritableComparable {
+  private final Log LOG = LogFactory.getLog(this.getClass().getName());
+
   public static Text extractFamily(Text col) throws IOException {
     String column = col.toString();
     int colpos = column.indexOf(":");
@@ -128,12 +132,26 @@
         extractFamily(other.getColumn())) == 0;
 
     } catch(IOException e) {
+      LOG.error(e);
     }
     return status;
   }
 
   public String toString() {
     return row.toString() + "/" + column.toString() + "/" + timestamp;
   }
+
+  @Override
+  public boolean equals(Object obj) {
+    return compareTo(obj) == 0;
+  }
+
+  @Override
+  public int hashCode() {
+    int result = this.row.hashCode();
+    result ^= this.column.hashCode();
+    result ^= Long.valueOf(this.timestamp).hashCode();
+    return result;
+  }
 
 //////////////////////////////////////////////////////////////////////////////
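The obtainFileLabel() hunk above replaces Math.abs(rand.nextInt()) with rand.nextInt(Integer.MAX_VALUE), the fix Findbugs suggests for taking the absolute value of a random int: Math.abs(Integer.MIN_VALUE) overflows and returns Integer.MIN_VALUE, still negative, and nextInt() produces that value roughly once in 2^32 calls, while the bounded form is guaranteed non-negative. A small demonstration:

    import java.util.Random;

    class AbsOverflowDemo {
      public static void main(String[] args) {
        // -Integer.MIN_VALUE does not fit in an int, so Math.abs
        // returns Integer.MIN_VALUE unchanged -- still negative.
        System.out.println(Math.abs(Integer.MIN_VALUE));  // -2147483648

        // nextInt() yields Integer.MIN_VALUE about once per 2^32 calls,
        // so Math.abs(rand.nextInt()) is occasionally negative.
        Random rand = new Random();

        // The bounded form is always in [0, Integer.MAX_VALUE).
        int suffix = rand.nextInt(Integer.MAX_VALUE);
        System.out.println(suffix >= 0);  // always true
      }
    }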
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java Wed May 23 08:14:00 2007
@@ -124,6 +124,24 @@
       ", maxVersions: " + this.maxVersions +
       ", families: " + this.families;
   }
 
+  @Override
+  public boolean equals(Object obj) {
+    return compareTo(obj) == 0;
+  }
+
+  @Override
+  public int hashCode() {
+    // TODO: Cache.
+    int result = this.name.hashCode();
+    result ^= Integer.valueOf(this.maxVersions).hashCode();
+    if (this.families != null && this.families.size() > 0) {
+      for (Text family: this.families) {
+        result ^= family.hashCode();
+      }
+    }
+    return result;
+  }
+
 //////////////////////////////////////////////////////////////////////////////
 // Writable
 //////////////////////////////////////////////////////////////////////////////

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java?view=diff&rev=540973&r1=540972&r2=540973
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java Wed May 23 08:14:00 2007
@@ -71,6 +71,7 @@
       this.leaseMonitorThread.interrupt();
       this.leaseMonitorThread.join();
     } catch (InterruptedException iex) {
+      // Ignore
     }
     synchronized(leases) {
       synchronized(sortedLeases) {
@@ -166,6 +167,7 @@
         try {
           Thread.sleep(leaseCheckFrequency);
         } catch (InterruptedException ie) {
+          // Ignore
         }
       }
     }
@@ -209,6 +211,18 @@
     public void expired() {
       listener.leaseExpired();
     }
+
+    @Override
+    public boolean equals(Object obj) {
+      return compareTo(obj) == 0;
+    }
+
+    @Override
+    public int hashCode() {
+      int result = this.getLeaseId().hashCode();
+      result ^= Long.valueOf(this.lastUpdate).hashCode();
+      return result;
+    }
 
 //////////////////////////////////////////////////////////////////////////////
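All of the hashCode() methods added in this commit (HLogKey, HStoreFile, HStoreKey, HTableDescriptor, Leases) combine field hashes with XOR, which is simple and fast but symmetric, so swapping two field values produces the same hash. The multiply-and-add idiom from Effective Java spreads values better when fields correlate. A sketch contrasting the two styles, with hypothetical field values; note that overriding hashCode() without equals() is itself something Findbugs would flag, so this fragment is illustrative only:

    class Descriptor {
      final String name = "test";
      final int maxVersions = 3;

      // XOR composition, matching the style used in this commit.
      public int hashCode() {
        int result = name.hashCode();
        result ^= Integer.valueOf(maxVersions).hashCode();
        return result;
      }

      // The multiply-and-add alternative, shown for comparison.
      public int altHashCode() {
        int result = 17;
        result = 31 * result + name.hashCode();
        result = 31 * result + maxVersions;
        return result;
      }
    }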