Author: stack
Date: Sun Jan 13 12:10:22 2008
New Revision: 611629

URL: http://svn.apache.org/viewvc?rev=611629&view=rev
Log:
HADOOP-2533 Scanning, just creating MapWritable in next consumes >20% CPU
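The diff below swaps org.apache.hadoop.io.MapWritable for a new org.apache.hadoop.hbase.io.HbaseMapWritable in the scanner and getRow RPC paths. The new class resolves classes through a small static code table primed with the hbase Writables rather than building per-instance class/id bookkeeping, which is what made allocating a MapWritable on every next() call so costly. As a reading aid, here is a minimal client-side sketch of consuming the new return type; it is not part of the commit, the class and method names are made up, and the HStoreKey/ImmutableBytesWritable unpacking follows the patterns HMaster and HTable use in this diff.

// Sketch only -- not part of this commit.  ScanConsumerSketch and drain()
// are hypothetical names; the unpacking pattern mirrors HMaster/HTable below.
package org.apache.hadoop.hbase.io;

import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.hbase.HRegionInterface;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class ScanConsumerSketch {
  /** Drain an already-open scanner, one row's worth of columns at a time. */
  static void drain(final HRegionInterface server, final long scannerId)
  throws IOException {
    while (true) {
      // One RPC per call; null or an empty map means the scanner is done.
      HbaseMapWritable values = server.next(scannerId);
      if (values == null || values.size() == 0) {
        break;
      }
      TreeMap<Text, byte[]> row = new TreeMap<Text, byte[]>();
      for (Map.Entry<Writable, Writable> e : values.entrySet()) {
        HStoreKey k = (HStoreKey) e.getKey();
        row.put(k.getColumn(), ((ImmutableBytesWritable) e.getValue()).get());
      }
      // ... process the row's columns ...
    }
  }
}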
Added:
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java

Modified:
    lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConnectionManager.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMasterRegionInterface.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionInterface.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/DisabledTestScanner2.java

Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt?rev=611629&r1=611628&r2=611629&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt Sun Jan 13 12:10:22 2008
@@ -33,6 +33,11 @@
    HADOOP-2479 Save on number of Text object creations
    HADOOP-2485 Make mapfile index interval configurable (Set default to 32
                instead of 128)
+   HADOOP-2553 Don't make Long objects calculating hbase type hash codes
+   HADOOP-2377 Holding open MapFile.Readers is expensive, so use less of them
+   HADOOP-2407 Keeping MapFile.Reader open is expensive: Part 2
+   HADOOP-2533 Performance: Scanning, just creating MapWritable in next
+               consumes >20% CPU
 
   BUG FIXES
    HADOOP-2059 In tests, exceptions in min dfs shutdown should not fail test
@@ -104,8 +109,7 @@
                deleted
    HADOOP-2468 TestRegionServerExit failed in Hadoop-Nightly #338
    HADOOP-2467 scanner truncates resultset when > 1 column families
-   HADOOP-2503 REST Insert / Select encoding issue
-               (Bryan Duxbury via Stack)
+   HADOOP-2503 REST Insert / Select encoding issue (Bryan Duxbury via Stack)
    HADOOP-2505 formatter classes missing apache license
    HADOOP-2504 REST servlet method for deleting a scanner was not properly
                mapped (Bryan Duxbury via Stack)
@@ -148,11 +152,9 @@
    HADOOP-2299 Support inclusive scans (Bryan Duxbury via Stack)
    HADOOP-2333 Client side retries happen at the wrong level
    HADOOP-2357 Compaction cleanup; less deleting + prevent possible file leaks
-   HADOOP-2377 Holding open MapFile.Readers is expensive, so use less of them
    HADOOP-2392 TestRegionServerExit has new failure mode since HADOOP-2338
    HADOOP-2370 Allow column families with an unlimited number of versions
                (Edward Yoon via Stack)
-   HADOOP-2407 Keeping MapFile.Reader open is expensive: Part 2
    HADOOP-2047 Add an '--master=X' and '--html' command-line parameters to
                shell (Edward Yoon via Stack)
    HADOOP-2351 If select command returns no result, it doesn't need to show the
@@ -167,7 +169,6 @@
                (Edward Yoon via Stack)
    HADOOP-2450 Show version (and svn revision) in hbase web ui
    HADOOP-2472 Range selection using filter (Edward Yoon via Stack)
-   HADOOP-2553 Don't make Long objects calculating hbase type hash codes
    HADOOP-2548 Make TableMap and TableReduce generic
                (Frederik Hedberg via Stack)
    HADOOP-2557 Shell count function (Edward Yoon via Stack)
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java?rev=611629&r1=611628&r2=611629&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HBaseAdmin.java Sun Jan 13 12:10:22 2008
@@ -28,7 +28,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;
@@ -190,7 +190,7 @@
         scannerId =
           server.openScanner(firstMetaServer.getRegionInfo().getRegionName(),
             COL_REGIONINFO_ARRAY, tableName, System.currentTimeMillis(), null);
-        MapWritable values = server.next(scannerId);
+        HbaseMapWritable values = server.next(scannerId);
         if (values == null || values.size() == 0) {
           break;
         }
@@ -274,7 +274,7 @@
       boolean isenabled = false;
 
       while (true) {
-        MapWritable values = server.next(scannerId);
+        HbaseMapWritable values = server.next(scannerId);
         if (values == null || values.size() == 0) {
           if (valuesfound == 0) {
             throw new NoSuchElementException(
@@ -375,7 +375,7 @@
       boolean disabled = false;
 
       while (true) {
-        MapWritable values = server.next(scannerId);
+        HbaseMapWritable values = server.next(scannerId);
         if (values == null || values.size() == 0) {
           if (valuesfound == 0) {
             throw new NoSuchElementException("table " + tableName + " not found");

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConnectionManager.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConnectionManager.java?rev=611629&r1=611628&r2=611629&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConnectionManager.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConnectionManager.java Sun Jan 13 12:10:22 2008
@@ -36,7 +36,7 @@
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.ipc.HbaseRPC;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;
@@ -259,7 +259,7 @@
             null);
 
         while (true) {
-          MapWritable values = server.next(scannerId);
+          HbaseMapWritable values = server.next(scannerId);
           if (values == null || values.size() == 0) {
             break;
           }
@@ -715,7 +715,7 @@
           COLUMN_FAMILY_ARRAY, tableName, System.currentTimeMillis(), null);
 
         while (true) {
-          MapWritable values = server.next(scannerId);
+          HbaseMapWritable values = server.next(scannerId);
           if (values == null || values.size() == 0) {
             if (servers.size() == 0) {
               // If we didn't find any servers then the table does not exist

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java?rev=611629&r1=611628&r2=611629&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java Sun Jan 13 12:10:22 2008
@@ -57,7 +57,7 @@
 import org.apache.hadoop.hbase.util.Sleeper;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;
@@ -224,7 +224,7 @@
       int numberOfRegionsFound = 0;
       while (true) {
         SortedMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
-        MapWritable values = regionServer.next(scannerId);
+        HbaseMapWritable values = regionServer.next(scannerId);
         if (values == null || values.size() == 0) {
           break;
         }
@@ -1185,6 +1185,9 @@
     if (LOG.isDebugEnabled()) {
       LOG.debug("Started service threads");
     }
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Started service threads");
+    }
   }
 
   /*
@@ -1262,7 +1265,7 @@
 
   /** {@inheritDoc} */
   @SuppressWarnings("unused")
-  public MapWritable regionServerStartup(HServerInfo serverInfo)
+  public HbaseMapWritable regionServerStartup(HServerInfo serverInfo)
   throws IOException {
     String s = serverInfo.getServerAddress().toString().trim();
 
@@ -1315,12 +1318,12 @@
    * @return Subset of configuration to pass initializing regionservers: e.g.
    * the filesystem to use and root directory to use.
    */
-  protected MapWritable createConfigurationSubset() {
-    MapWritable mw = addConfig(new MapWritable(), HConstants.HBASE_DIR);
+  protected HbaseMapWritable createConfigurationSubset() {
+    HbaseMapWritable mw = addConfig(new HbaseMapWritable(), HConstants.HBASE_DIR);
     return addConfig(mw, "fs.default.name");
   }
 
-  private MapWritable addConfig(final MapWritable mw, final String key) {
+  private HbaseMapWritable addConfig(final HbaseMapWritable mw, final String key) {
     mw.put(new Text(key), new Text(this.conf.get(key)));
     return mw;
   }
@@ -1992,7 +1995,7 @@
 
       try {
         while (true) {
-          MapWritable values = null;
+          HbaseMapWritable values = null;
           try {
             values = server.next(scannerId);
           } catch (IOException e) {
@@ -2588,7 +2591,7 @@
     long scannerid = server.openScanner(metaRegionName, COL_REGIONINFO_ARRAY,
       tableName, System.currentTimeMillis(), null);
     try {
-      MapWritable data = server.next(scannerid);
+      HbaseMapWritable data = server.next(scannerid);
       // Test data and that the row for the data is for our table. If table
       // does not exist, scanner will return row after where our table would
@@ -2743,7 +2746,7 @@
 
       String serverName = null;
       long startCode = -1L;
-      MapWritable values = server.next(scannerId);
+      HbaseMapWritable values = server.next(scannerId);
       if(values == null || values.size() == 0) {
         break;
       }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMasterRegionInterface.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMasterRegionInterface.java?rev=611629&r1=611628&r2=611629&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMasterRegionInterface.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMasterRegionInterface.java Sun Jan 13 12:10:22 2008
@@ -21,7 +21,7 @@
 
 import java.io.IOException;
 
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.ipc.VersionedProtocol;
 
 /**
@@ -39,7 +39,7 @@
    * @return Configuration for the regionserver to use: e.g. filesystem,
    * hbase rootdir, etc.
    */
-  public MapWritable regionServerStartup(HServerInfo info) throws IOException;
+  public HbaseMapWritable regionServerStartup(HServerInfo info) throws IOException;
 
   /**
    * Called to renew lease, tell master what the region server is doing and to

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionInterface.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionInterface.java?rev=611629&r1=611628&r2=611629&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionInterface.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionInterface.java Sun Jan 13 12:10:22 2008
@@ -24,7 +24,7 @@
 
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.io.BatchUpdate;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.VersionedProtocol;
 
@@ -96,7 +96,7 @@
    * @return map of values
    * @throws IOException
    */
-  public MapWritable getRow(final Text regionName, final Text row)
+  public HbaseMapWritable getRow(final Text regionName, final Text row)
   throws IOException;
 
   /**
@@ -107,7 +107,7 @@
    * @return map of values
    * @throws IOException
    */
-  public MapWritable getRow(final Text regionName, final Text row, final long ts)
+  public HbaseMapWritable getRow(final Text regionName, final Text row, final long ts)
   throws IOException;
 
@@ -192,7 +192,7 @@
    * @return map of values
    * @throws IOException
    */
-  public MapWritable next(long scannerId) throws IOException;
+  public HbaseMapWritable next(long scannerId) throws IOException;
 
   /**
    * Close a scanner

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java?rev=611629&r1=611628&r2=611629&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java Sun Jan 13 12:10:22 2008
@@ -60,7 +60,7 @@
 import org.apache.hadoop.hbase.util.Sleeper;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.Server;
@@ -909,7 +909,7 @@
    * Run init. Sets up hlog and starts up all server threads.
    * @param c Extra configuration.
    */
-  private void init(final MapWritable c) throws IOException {
+  private void init(final HbaseMapWritable c) throws IOException {
     try {
       for (Map.Entry<Writable, Writable> e: c.entrySet()) {
         String key = e.getKey().toString();
@@ -1059,7 +1059,7 @@
    * Let the master know we're here
    * Run initialization using parameters passed us by the master.
    */
-  private MapWritable reportForDuty() throws IOException {
+  private HbaseMapWritable reportForDuty() throws IOException {
     if (LOG.isDebugEnabled()) {
       LOG.debug("Telling master at " +
         conf.get(MASTER_ADDRESS) + " that we are up");
@@ -1069,7 +1069,7 @@
         HMasterRegionInterface.class, HMasterRegionInterface.versionID,
         new HServerAddress(conf.get(MASTER_ADDRESS)).getInetSocketAddress(),
         this.conf);
-    MapWritable result = null;
+    HbaseMapWritable result = null;
     long lastMsg = 0;
     while(!stopRequested.get()) {
       try {
@@ -1375,20 +1375,20 @@
   }
 
   /** {@inheritDoc} */
-  public MapWritable getRow(final Text regionName, final Text row)
+  public HbaseMapWritable getRow(final Text regionName, final Text row)
   throws IOException {
     return getRow(regionName, row, HConstants.LATEST_TIMESTAMP);
   }
 
   /** {@inheritDoc} */
-  public MapWritable getRow(final Text regionName, final Text row, final long ts)
+  public HbaseMapWritable getRow(final Text regionName, final Text row, final long ts)
   throws IOException {
     checkOpen();
     requestCount.incrementAndGet();
     try {
       HRegion region = getRegion(regionName);
-      MapWritable result = new MapWritable();
+      HbaseMapWritable result = new HbaseMapWritable();
       Map<Text, byte[]> map = region.getFull(row, ts);
       for (Map.Entry<Text, byte []> es: map.entrySet()) {
         result.put(new HStoreKey(row, es.getKey()),
@@ -1404,7 +1404,7 @@
 
   /** {@inheritDoc} */
-  public MapWritable next(final long scannerId) throws IOException {
+  public HbaseMapWritable next(final long scannerId) throws IOException {
     checkOpen();
     requestCount.incrementAndGet();
@@ -1417,7 +1417,7 @@
       this.leases.renewLease(scannerId, scannerId);
 
       // Collect values to be returned here
-      MapWritable values = new MapWritable();
+      HbaseMapWritable values = new HbaseMapWritable();
       HStoreKey key = new HStoreKey();
       TreeMap<Text, byte []> results = new TreeMap<Text, byte []>();
       while (s.next(key, results)) {
@@ -1445,7 +1445,6 @@
   /** {@inheritDoc} */
   public void batchUpdate(Text regionName, long timestamp, BatchUpdate b)
   throws IOException {
-
     checkOpen();
     this.requestCount.incrementAndGet();
     HRegion region = getRegion(regionName);

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java?rev=611629&r1=611628&r2=611629&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java Sun Jan 13 12:10:22 2008
@@ -39,7 +39,7 @@
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;
@@ -366,7 +366,7 @@
    */
   public SortedMap<Text, byte[]> getRow(Text row, long ts) throws IOException {
     checkClosed();
-    MapWritable value = null;
+    HbaseMapWritable value = null;
     for (int tries = 0; tries < numRetries; tries++) {
       HRegionLocation r = getRegionLocation(row);
       HRegionInterface server =
@@ -1063,7 +1063,7 @@
       if (this.closed) {
         return false;
       }
-      MapWritable values = null;
+      HbaseMapWritable values = null;
       // Clear the results so we don't inherit any values from any previous
       // calls to next.
       results.clear();

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java?rev=611629&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java Sun Jan 13 12:10:22 2008
@@ -0,0 +1,204 @@
+/**
+ * Copyright 2008 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * A Writable Map.
+ * Like {@link org.apache.hadoop.io.MapWritable} but dumb. It will fail
+ * if passed a Writable it has not already been told about. Its also been
+ * primed with hbase Writables.
+ */
+public class HbaseMapWritable implements Map<Writable, Writable>, Writable,
+  Configurable {
+  private AtomicReference<Configuration> conf =
+    new AtomicReference<Configuration>();
+
+  // Static maps of code to class and vice versa.  Includes types used in hbase
+  // only.
+  static final Map<Byte, Class<? extends Writable>> CODE_TO_CLASS =
+    new HashMap<Byte, Class<? extends Writable>>();
+  static final Map<Class<? extends Writable>, Byte> CLASS_TO_CODE =
+    new HashMap<Class<? extends Writable>, Byte>();
+
+  static {
+    byte code = 0;
+    addToMap(HStoreKey.class, code++);
+    addToMap(ImmutableBytesWritable.class, code++);
+    addToMap(Text.class, code++);
+  }
+
+  @SuppressWarnings("boxing")
+  private static void addToMap(final Class<? extends Writable> clazz,
+      final byte code) {
+    CLASS_TO_CODE.put(clazz, code);
+    CODE_TO_CLASS.put(code, clazz);
+  }
+
+  private Map<Writable, Writable> instance;
+
+  /** Default constructor. */
+  public HbaseMapWritable() {
+    super();
+    this.instance = new HashMap<Writable, Writable>();
+  }
+
+  /** @return the conf */
+  public Configuration getConf() {
+    return conf.get();
+  }
+
+  /** @param conf the conf to set */
+  public void setConf(Configuration conf) {
+    this.conf.set(conf);
+  }
+
+  /** {@inheritDoc} */
+  public void clear() {
+    instance.clear();
+  }
+
+  /** {@inheritDoc} */
+  public boolean containsKey(Object key) {
+    return instance.containsKey(key);
+  }
+
+  /** {@inheritDoc} */
+  public boolean containsValue(Object value) {
+    return instance.containsValue(value);
+  }
+
+  /** {@inheritDoc} */
+  public Set<Map.Entry<Writable, Writable>> entrySet() {
+    return instance.entrySet();
+  }
+
+  /** {@inheritDoc} */
+  public Writable get(Object key) {
+    return instance.get(key);
+  }
+
+  /** {@inheritDoc} */
+  public boolean isEmpty() {
+    return instance.isEmpty();
+  }
+
+  /** {@inheritDoc} */
+  public Set<Writable> keySet() {
+    return instance.keySet();
+  }
+
+  /** {@inheritDoc} */
+  @SuppressWarnings("unchecked")
+  public Writable put(Writable key, Writable value) {
+    return instance.put(key, value);
+  }
+
+  /** {@inheritDoc} */
+  public void putAll(Map<? extends Writable, ? extends Writable> t) {
+    for (Map.Entry<? extends Writable, ? extends Writable> e: t.entrySet()) {
+      instance.put(e.getKey(), e.getValue());
+    }
+  }
+
+  /** {@inheritDoc} */
+  public Writable remove(Object key) {
+    return instance.remove(key);
+  }
+
+  /** {@inheritDoc} */
+  public int size() {
+    return instance.size();
+  }
+
+  /** {@inheritDoc} */
+  public Collection<Writable> values() {
+    return instance.values();
+  }
+
+  // Writable
+
+  /** @return the Class class for the specified id */
+  @SuppressWarnings({ "unchecked", "boxing" })
+  protected Class<?> getClass(byte id) {
+    return CODE_TO_CLASS.get(id);
+  }
+
+  /** @return the id for the specified Class */
+  @SuppressWarnings({ "unchecked", "boxing" })
+  protected byte getId(Class<?> clazz) {
+    Byte b = CLASS_TO_CODE.get(clazz);
+    if (b == null) {
+      throw new NullPointerException("Nothing for : " + clazz);
+    }
+    return b;
+  }
+
+  public void write(DataOutput out) throws IOException {
+    // Write out the number of entries in the map
+    out.writeInt(instance.size());
+
+    // Then write out each key/value pair
+    for (Map.Entry<Writable, Writable> e: instance.entrySet()) {
+      out.writeByte(getId(e.getKey().getClass()));
+      e.getKey().write(out);
+      out.writeByte(getId(e.getValue().getClass()));
+      e.getValue().write(out);
+    }
+  }
+
+  public void readFields(DataInput in) throws IOException {
+    // First clear the map.  Otherwise we will just accumulate
+    // entries every time this method is called.
+    this.instance.clear();
+
+    // Read the number of entries in the map
+    int entries = in.readInt();
+
+    // Then read each key/value pair
+    for (int i = 0; i < entries; i++) {
+      Writable key = (Writable) ReflectionUtils.newInstance(getClass(
+        in.readByte()), getConf());
+
+      key.readFields(in);
+
+      Writable value = (Writable) ReflectionUtils.newInstance(getClass(
+        in.readByte()), getConf());
+
+      value.readFields(in);
+      instance.put(key, value);
+    }
+  }
+}
\ No newline at end of file

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java?rev=611629&r1=611628&r2=611629&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java Sun Jan 13 12:10:22 2008
@@ -38,7 +38,7 @@
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.RowFilterSet;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.ObjectWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -91,7 +91,7 @@
     // Hadoop types
     addToMap(Text.class, code++);
     addToMap(Writable.class, code++);
-    addToMap(MapWritable.class, code++);
+    addToMap(HbaseMapWritable.class, code++);
     addToMap(NullInstance.class, code++);
     try {
       addToMap(Class.forName("[Lorg.apache.hadoop.io.Text;"), code++);

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/DisabledTestScanner2.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/DisabledTestScanner2.java?rev=611629&r1=611628&r2=611629&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/DisabledTestScanner2.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/DisabledTestScanner2.java Sun Jan 13 12:10:22 2008
@@ -38,10 +38,9 @@
 import org.apache.hadoop.hbase.filter.RowFilterSet;
 import org.apache.hadoop.hbase.filter.StopRowFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -391,7 +390,7 @@
       System.currentTimeMillis(), null);
     while (true) {
       TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
-      MapWritable values = regionServer.next(scannerId);
+      HbaseMapWritable values = regionServer.next(scannerId);
       if (values == null || values.size() == 0) {
         break;
       }
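The HbaseMapWritable.java added above is the heart of the change: write() emits one byte of class code per key and per value, and readFields() resolves those codes against the static CODE_TO_CLASS table, so no per-instance class dictionary has to be built or shipped. A minimal round-trip sketch follows, assuming only the APIs visible in this diff; RoundTripSketch and the sample row, column, and value are made up for illustration.

// Sketch only -- not part of this commit.  Round-trips an HbaseMapWritable
// through write()/readFields() to show the static code-table lookup at work.
package org.apache.hadoop.hbase.io;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.io.Text;

public class RoundTripSketch {
  public static void main(final String[] args) throws IOException {
    HbaseMapWritable out = new HbaseMapWritable();
    // Key and value types must be among those primed in the static block
    // (HStoreKey, ImmutableBytesWritable, Text); write() fails on anything else.
    out.put(new HStoreKey(new Text("row1"), new Text("info:a")),
      new ImmutableBytesWritable("value".getBytes()));

    // write(): an int entry count, then per entry a one-byte class code
    // followed by the Writable's own serialization, for the key and the value.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    out.write(new DataOutputStream(bytes));

    // readFields(): classes come back from the static CODE_TO_CLASS table,
    // instantiated via ReflectionUtils, then filled from the stream.
    HbaseMapWritable in = new HbaseMapWritable();
    in.readFields(new DataInputStream(
      new ByteArrayInputStream(bytes.toByteArray())));
    System.out.println(in.size());    // prints 1
  }
}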