Author: cutting Date: Mon May 21 15:45:18 2007 New Revision: 540336 URL: http://svn.apache.org/viewvc?view=rev&rev=540336 Log: HADOOP-1387. A number of minor 'performance' code cleanups suggested by findbugs. Contributed by Arun.
Modified: lucene/hadoop/trunk/CHANGES.txt lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalDirAllocator.java lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressorStream.java lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/BasicTypeSorterBase.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Counters.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobProfile.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskLog.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/aggregate/ValueHistogram.java lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java lucene/hadoop/trunk/src/webapps/job/analysejobhistory.jsp lucene/hadoop/trunk/src/webapps/job/jobdetails.jsp 
lucene/hadoop/trunk/src/webapps/job/jobdetailshistory.jsp lucene/hadoop/trunk/src/webapps/job/jobhistory.jsp Modified: lucene/hadoop/trunk/CHANGES.txt URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/CHANGES.txt (original) +++ lucene/hadoop/trunk/CHANGES.txt Mon May 21 15:45:18 2007 @@ -42,6 +42,9 @@ 13. HADOOP-1393. Remove a potential unexpected negative number from uses of random number generator. (omalley via cutting) + 14. HADOOP-1387. A number of "performance" code-cleanups suggested + by findbugs. (Arun C Murthy via cutting) + Branch 0.13 (unreleased changes) Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalDirAllocator.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalDirAllocator.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalDirAllocator.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/LocalDirAllocator.java Mon May 21 15:45:18 2007 @@ -148,7 +148,7 @@ } } - private class AllocatorPerContext { + private static class AllocatorPerContext { private final Log LOG = LogFactory.getLog("org.apache.hadoop.fs.AllocatorPerContext"); Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/MapFile.java Mon May 21 15:45:18 2007 @@ -193,8 +193,6 @@ private int seekIndex = -1; private long firstPosition; - private WritableComparable getKey; 
- // the data, on disk private SequenceFile.Reader data; private SequenceFile.Reader index; @@ -234,8 +232,6 @@ this.comparator = WritableComparator.get(data.getKeyClass()); else this.comparator = comparator; - - this.getKey = this.comparator.newKey(); // open the index this.index = new SequenceFile.Reader(fs, indexFile, conf); Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java Mon May 21 15:45:18 2007 @@ -188,19 +188,19 @@ if (declaredClass == Boolean.TYPE) { // boolean instance = Boolean.valueOf(in.readBoolean()); } else if (declaredClass == Character.TYPE) { // char - instance = new Character(in.readChar()); + instance = Character.valueOf(in.readChar()); } else if (declaredClass == Byte.TYPE) { // byte - instance = new Byte(in.readByte()); + instance = Byte.valueOf(in.readByte()); } else if (declaredClass == Short.TYPE) { // short - instance = new Short(in.readShort()); + instance = Short.valueOf(in.readShort()); } else if (declaredClass == Integer.TYPE) { // int - instance = new Integer(in.readInt()); + instance = Integer.valueOf(in.readInt()); } else if (declaredClass == Long.TYPE) { // long - instance = new Long(in.readLong()); + instance = Long.valueOf(in.readLong()); } else if (declaredClass == Float.TYPE) { // float - instance = new Float(in.readFloat()); + instance = Float.valueOf(in.readFloat()); } else if (declaredClass == Double.TYPE) { // double - instance = new Double(in.readDouble()); + instance = Double.valueOf(in.readDouble()); } else if (declaredClass == Void.TYPE) { // void instance = null; } else { Modified: 
lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java Mon May 21 15:45:18 2007 @@ -596,7 +596,6 @@ Configuration conf; FSDataOutputStream out; DataOutputBuffer buffer = new DataOutputBuffer(); - Path target = null; Class keyClass; Class valClass; @@ -690,7 +689,6 @@ Class keyClass, Class valClass, boolean compress, CompressionCodec codec, Metadata metadata) throws IOException { - this.target = name; this.conf = conf; this.out = out; this.keyClass = keyClass; Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressorStream.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressorStream.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressorStream.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressorStream.java Mon May 21 15:45:18 2007 @@ -28,11 +28,9 @@ Compressor compressor; byte[] buffer; boolean closed = false; - OutputStream rawOut; public CompressorStream(OutputStream out, Compressor compressor, int bufferSize) { super(out); - rawOut = out; if (out == null || compressor == null) { throw new NullPointerException(); Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- 
lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Client.java Mon May 21 15:45:18 2007 @@ -262,7 +262,7 @@ if (LOG.isDebugEnabled()) LOG.debug(getName() + " got value #" + id); - Call call = (Call)calls.remove(new Integer(id)); + Call call = (Call)calls.remove(id); boolean isError = in.readBoolean(); // read if error if (isError) { call.setResult(null, WritableUtils.readString(in), @@ -308,7 +308,7 @@ public void sendParam(Call call) throws IOException { boolean error = true; try { - calls.put(new Integer(call.id), call); + calls.put(call.id, call); synchronized (out) { if (LOG.isDebugEnabled()) LOG.debug(getName() + " sending #" + call.id); Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/ipc/Server.java Mon May 21 15:45:18 2007 @@ -440,12 +440,6 @@ return lastContact; } - private boolean timedOut() { - if (System.currentTimeMillis() - lastContact > maxIdleTime) - return true; - return false; - } - private boolean timedOut(long currentTime) { if (currentTime - lastContact > maxIdleTime) return true; Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/BasicTypeSorterBase.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/BasicTypeSorterBase.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/BasicTypeSorterBase.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/BasicTypeSorterBase.java Mon May 21 15:45:18 2007 @@ -50,7 
+50,7 @@ //12 => 4 for keyoffsets, 4 for keylengths, 4 for valueLengths, and //4 for indices into startOffsets array in the //pointers array (ignored the partpointers list itself) - private final int BUFFERED_KEY_VAL_OVERHEAD = 16; + static private final int BUFFERED_KEY_VAL_OVERHEAD = 16; //Implementation of methods of the SorterBase interface // @@ -176,7 +176,7 @@ //An implementation of the ValueBytes interface for the in-memory value //buffers. - private class InMemUncompressedBytes implements ValueBytes { + private static class InMemUncompressedBytes implements ValueBytes { private byte[] data; int start; int dataSize; Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Counters.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Counters.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Counters.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Counters.java Mon May 21 15:45:18 2007 @@ -321,12 +321,14 @@ public synchronized void write(DataOutput out) throws IOException { out.writeInt(counters.size()); - for (String groupName : counters.keySet()) { + for (Map.Entry<String, Map<Integer, CounterRec>> e1 : counters.entrySet()) { + String groupName = e1.getKey(); + Map<Integer, CounterRec> map = e1.getValue(); UTF8.writeString(out, groupName); - Map<Integer,CounterRec> map = counters.get(groupName); out.writeInt(map.size()); - for (Integer ordinal : map.keySet()) { - CounterRec counter = map.get(ordinal); + for (Map.Entry<Integer, CounterRec> e2 : map.entrySet()) { + Integer ordinal = e2.getKey(); + CounterRec counter = e2.getValue(); out.writeInt(ordinal); UTF8.writeString(out, counter.name); out.writeLong(counter.value); Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java Mon May 21 15:45:18 2007 @@ -286,27 +286,25 @@ if ((tarchives != null) || (tfiles != null)) { // prepare these archives for md5 checksums if (tarchives != null) { - String md5Archives = StringUtils.byteToHexString(DistributedCache - .createMD5(tarchives[0], job)); + StringBuffer md5Archives = + new StringBuffer(StringUtils.byteToHexString(DistributedCache.createMD5(tarchives[0], job))); for (int i = 1; i < tarchives.length; i++) { - md5Archives = md5Archives - + "," - + StringUtils.byteToHexString(DistributedCache - .createMD5(tarchives[i], job)); + md5Archives.append(","); + md5Archives.append(StringUtils.byteToHexString(DistributedCache + .createMD5(tarchives[i], job))); } - DistributedCache.setArchiveMd5(job, md5Archives); + DistributedCache.setArchiveMd5(job, md5Archives.toString()); //job.set("mapred.cache.archivemd5", md5Archives); } if (tfiles != null) { - String md5Files = StringUtils.byteToHexString(DistributedCache - .createMD5(tfiles[0], job)); + StringBuffer md5Files = + new StringBuffer(StringUtils.byteToHexString(DistributedCache.createMD5(tfiles[0], job))); for (int i = 1; i < tfiles.length; i++) { - md5Files = md5Files - + "," - + StringUtils.byteToHexString(DistributedCache - .createMD5(tfiles[i], job)); + md5Files.append(","); + md5Files.append(StringUtils.byteToHexString(DistributedCache + .createMD5(tfiles[i], job))); } - DistributedCache.setFileMd5(job, md5Files); + DistributedCache.setFileMd5(job, md5Files.toString()); //"mapred.cache.filemd5", md5Files); } } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java Mon May 21 15:45:18 2007 @@ -690,9 +690,9 @@ // find job that started more than one month back and remove them // for jobtracker instances which dont have a job in past one month - // remove the jobtracker start timestamp as well. - for (String jobTrackerId : jobTrackersToJobs.keySet()){ - Map<String, JobHistory.JobInfo> jobs = jobTrackersToJobs.get(jobTrackerId); + // remove the jobtracker start timestamp as well. + for (Map<String, JobHistory.JobInfo> jobs : + jobTrackersToJobs.values()) { for(Iterator iter = jobs.keySet().iterator(); iter.hasNext(); iter.next()){ JobHistory.JobInfo job = jobs.get(iter.next()); if (now - job.getLong(Keys.SUBMIT_TIME) > THIRTY_DAYS_IN_MS) { @@ -705,14 +705,16 @@ } masterIndex.close(); masterIndex = new PrintWriter(logFile); - // delete old history and write back to a new file - for (String jobTrackerId : jobTrackersToJobs.keySet()){ - Map<String, JobHistory.JobInfo> jobs = jobTrackersToJobs.get(jobTrackerId); + // delete old history and write back to a new file + for (Map.Entry<String, Map<String, JobHistory.JobInfo>> entry : + jobTrackersToJobs.entrySet()) { + String jobTrackerId = entry.getKey(); + Map<String, JobHistory.JobInfo> jobs = entry.getValue(); + log(masterIndex, RecordTypes.Jobtracker, Keys.START_TIME, jobTrackerId); - for(String jobId : jobs.keySet()){ - JobHistory.JobInfo job = jobs.get(jobId); + for(JobHistory.JobInfo job : jobs.values()){ Map<Keys, String> values = job.getValues(); log(masterIndex, RecordTypes.Job, Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobInProgress.java Mon May 21 15:45:18 2007 @@ -105,8 +105,6 @@ } private Counters jobCounters = new Counters(); - private Counters mapCounters = new Counters(); - private Counters reduceCounters = new Counters(); private MetricsRecord jobMetrics; /** @@ -563,7 +561,7 @@ Integer trackerFailures = trackerToFailuresMap.get(trackerHostName); if (trackerFailures == null) { - trackerFailures = new Integer(0); + trackerFailures = 0; } trackerToFailuresMap.put(trackerHostName, ++trackerFailures); Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobProfile.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobProfile.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobProfile.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobProfile.java Mon May 21 15:45:18 2007 @@ -95,7 +95,7 @@ */ public URL getURL() { try { - return new URL(url.toString()); + return new URL(url); } catch (IOException ie) { return null; } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobTracker.java Mon May 21 15:45:18 2007 @@ -222,7 
+222,7 @@ public void addNewTask(String taskName) { synchronized (launchingTasks) { launchingTasks.put(taskName, - new Long(System.currentTimeMillis())); + System.currentTimeMillis()); } } @@ -495,7 +495,6 @@ String localMachine; long startTime; int totalSubmissions = 0; - Random r = new Random(); private int maxCurrentTasks; private HostsFileReader hostsReader; @@ -667,7 +666,7 @@ // The rpc/web-server ports can be ephemeral ports... // ... ensure we have the correct info this.port = interTrackerServer.getListenerAddress().getPort(); - this.conf.set("mapred.job.tracker", new String(this.localMachine + ":" + this.port)); + this.conf.set("mapred.job.tracker", (this.localMachine + ":" + this.port)); LOG.info("JobTracker up at: " + this.port); this.infoPort = this.infoServer.getPort(); this.conf.setInt("mapred.job.tracker.info.port", this.infoPort); Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapRunner.java Mon May 21 15:45:18 2007 @@ -26,11 +26,9 @@ /** Default {@link MapRunnable} implementation.*/ public class MapRunner implements MapRunnable { - private JobConf job; private Mapper mapper; public void configure(JobConf job) { - this.job = job; this.mapper = (Mapper)ReflectionUtils.newInstance(job.getMapperClass(), job); } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- 
lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/MapTask.java Mon May 21 15:45:18 2007 @@ -239,11 +239,8 @@ private JobConf job; - private TaskUmbilicalProtocol umbilical; - public DirectMapOutputCollector(TaskUmbilicalProtocol umbilical, JobConf job, Reporter reporter) throws IOException { - this.umbilical = umbilical; this.job = job; this.reporter = reporter; String finalName = getTipId(); @@ -274,7 +271,6 @@ private final int partitions; private Partitioner partitioner; - private TaskUmbilicalProtocol umbilical; private JobConf job; private Reporter reporter; @@ -304,7 +300,6 @@ maxBufferSize = job.getInt("io.sort.mb", 100) * 1024 * 1024; keyValBuffer = new DataOutputBuffer(); - this.umbilical = umbilical; this.job = job; this.reporter = reporter; this.comparator = job.getOutputKeyComparator(); Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/ReduceTask.java Mon May 21 15:45:18 2007 @@ -839,7 +839,7 @@ probe_sample_size = Math.max(numCopiers*5, 50); for (int i = 0; i < numOutputs; i++) { - neededOutputs.add(new Integer(i)); + neededOutputs.add(i); copyPhase.addPhase(); // add sub-phase per file } @@ -975,7 +975,7 @@ currentTime = System.currentTimeMillis(); long nextContact = currentTime + 60 * 1000 + backoff.nextInt(maxBackoff*1000); - penaltyBox.put(cr.getHost(), new Long(nextContact)); + penaltyBox.put(cr.getHost(), nextContact); LOG.warn(reduceTask.getTaskId() + " adding host " + cr.getHost() + " to penalty box, next contact in " + ((nextContact-currentTime)/1000) + " seconds"); @@ 
-1131,7 +1131,7 @@ lastPollTime = currentTime; TaskCompletionEvent t[] = umbilical.getMapCompletionEvents( - reduceTask.getJobId().toString(), + reduceTask.getJobId(), fromEventId.get(), probe_sample_size); Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/Task.java Mon May 21 15:45:18 2007 @@ -139,7 +139,7 @@ if (taskOutputPath != null) { Text.writeString(out, taskOutputPath.toString()); } else { - Text.writeString(out, new String("")); + Text.writeString(out, ""); } } public void readFields(DataInput in) throws IOException { @@ -159,7 +159,7 @@ public String toString() { return taskId; } private Path getTaskOutputPath(JobConf conf) { - return new Path(conf.getOutputPath(), new String("_" + taskId)); + return new Path(conf.getOutputPath(), ("_" + taskId)); } /** Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskInProgress.java Mon May 21 15:45:18 2007 @@ -633,7 +633,7 @@ // Create the 'taskid'; do not count the 'killed' tasks against the job! 
String taskid = null; if (nextTaskId < (MAX_TASK_EXECS + maxTaskAttempts + numKilledTasks)) { - taskid = new String("task_" + taskIdPrefix + "_" + nextTaskId); + taskid = "task_" + taskIdPrefix + "_" + nextTaskId; ++nextTaskId; } else { LOG.warn("Exceeded limit of " + (MAX_TASK_EXECS + maxTaskAttempts) + Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskLog.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskLog.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskLog.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskLog.java Mon May 21 15:45:18 2007 @@ -82,7 +82,6 @@ */ static class Writer { private String taskId; - private JobConf conf; private LogFilter filter; private final File taskLogDir; @@ -245,8 +244,8 @@ } private synchronized void writeIndexRecord() throws IOException { - String indexRecord = new String(currentSplit + "|" + - splitOffset + "|" + splitLength + "\n"); + String indexRecord = currentSplit + "|" + splitOffset + "|" + + splitLength + "\n"; splitIndex.write(indexRecord.getBytes()); splitIndex.flush(); } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java Mon May 21 15:45:18 2007 @@ -91,11 +91,12 @@ if (p == null){ return null; } - String str = p[0].toString(); + StringBuffer str = new StringBuffer(p[0].toString()); for (int i = 1; i < p.length; i++){ - str = str + "," + p[i].toString(); + str.append(","); + 
str.append(p[i].toString()); } - return str; + return str.toString(); } public final void run() { Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java Mon May 21 15:45:18 2007 @@ -1864,7 +1864,7 @@ * @author Owen O'Malley */ public static class MapOutputServlet extends HttpServlet { - private final int MAX_BYTES_TO_READ = 64 * 1024; + private static final int MAX_BYTES_TO_READ = 64 * 1024; public void doGet(HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java Mon May 21 15:45:18 2007 @@ -208,7 +208,7 @@ int startPos = Integer.parseInt(start); int endPos = Integer.parseInt(end); for (j = startPos; j <= endPos; j++) { - fieldList.add(new Integer(j)); + fieldList.add(j); } } } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/aggregate/ValueHistogram.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/aggregate/ValueHistogram.java?view=diff&rev=540336&r1=540335&r2=540336 
============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/aggregate/ValueHistogram.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/aggregate/ValueHistogram.java Mon May 21 15:45:18 2007 @@ -59,9 +59,9 @@ long inc = Long.parseLong(countStr); if (count == null) { - count = new Long(inc); + count = inc; } else { - count = new Long(count.longValue() + inc); + count = count.longValue() + inc; } items.put(valStr, count); } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java Mon May 21 15:45:18 2007 @@ -177,8 +177,8 @@ doArchive(String logListURI, String archiveDirectory) throws IOException { - String destURL = new String("hdfs://" + fsConfig.get("fs.default.name", "local") + - archiveDirectory); + String destURL = "hdfs://" + fsConfig.get("fs.default.name", "local") + + archiveDirectory; CopyFiles.copy(fsConfig, logListURI, destURL, true, false); } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java Mon May 21 15:45:18 2007 @@ -159,12 +159,12 @@ * @param uris */ public static String uriToString(URI[] uris){ - String ret = null; - ret = uris[0].toString(); + 
StringBuffer ret = new StringBuffer(uris[0].toString()); for(int i = 1; i < uris.length;i++){ - ret = ret + "," + uris[i].toString(); + ret.append(","); + ret.append(uris[i].toString()); } - return ret; + return ret.toString(); } /** Modified: lucene/hadoop/trunk/src/webapps/job/analysejobhistory.jsp URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/webapps/job/analysejobhistory.jsp?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/webapps/job/analysejobhistory.jsp (original) +++ lucene/hadoop/trunk/src/webapps/job/analysejobhistory.jsp Mon May 21 15:45:18 2007 @@ -82,18 +82,18 @@ } Comparator<JobHistory.Task> cMap = new Comparator<JobHistory.Task>(){ public int compare(JobHistory.Task t1, JobHistory.Task t2){ - Long l1 = new Long(t1.getLong(Keys.FINISH_TIME) - t1.getLong(Keys.START_TIME)); - Long l2 = new Long(t2.getLong(Keys.FINISH_TIME) - t2.getLong(Keys.START_TIME)) ; - return l2.compareTo(l1); + long l1 = t1.getLong(Keys.FINISH_TIME) - t1.getLong(Keys.START_TIME); + long l2 = t2.getLong(Keys.FINISH_TIME) - t2.getLong(Keys.START_TIME); + return (l2<l1 ? -1 : (l2==l1 ? 0 : 1)); } }; Comparator<JobHistory.Task> cShuffle = new Comparator<JobHistory.Task>(){ public int compare(JobHistory.Task t1, JobHistory.Task t2){ - Long l1 = new Long(t1.getLong(Keys.SHUFFLE_FINISHED) - - t1.getLong(Keys.START_TIME)); - Long l2 = new Long(t2.getLong(Keys.SHUFFLE_FINISHED) - - t2.getLong(Keys.START_TIME)) ; - return l2.compareTo(l1); + long l1 = t1.getLong(Keys.SHUFFLE_FINISHED) - + t1.getLong(Keys.START_TIME); + long l2 = t2.getLong(Keys.SHUFFLE_FINISHED) - + t2.getLong(Keys.START_TIME); + return (l2<l1 ? -1 : (l2==l1 ? 
0 : 1)); } }; Arrays.sort(mapTasks, cMap); @@ -128,9 +128,9 @@ Comparator<JobHistory.Task> cFinishMapRed = new Comparator<JobHistory.Task>() { public int compare(JobHistory.Task t1, JobHistory.Task t2){ - Long l1 = new Long(t1.getLong(Keys.FINISH_TIME)); - Long l2 = new Long(t2.getLong(Keys.FINISH_TIME)); - return l2.compareTo(l1); + long l1 = t1.getLong(Keys.FINISH_TIME); + long l2 = t2.getLong(Keys.FINISH_TIME); + return (l2<l1 ? -1 : (l2==l1 ? 0 : 1)); } }; Arrays.sort(mapTasks, cFinishMapRed); @@ -175,9 +175,9 @@ Comparator<JobHistory.Task> cFinishShuffle = new Comparator<JobHistory.Task>() { public int compare(JobHistory.Task t1, JobHistory.Task t2){ - Long l1 = new Long(t1.getLong(Keys.SHUFFLE_FINISHED)); - Long l2 = new Long(t2.getLong(Keys.SHUFFLE_FINISHED)); - return l2.compareTo(l1); + long l1 = t1.getLong(Keys.SHUFFLE_FINISHED); + long l2 = t2.getLong(Keys.SHUFFLE_FINISHED); + return (l2<l1 ? -1 : (l2==l1 ? 0 : 1)); } }; Arrays.sort(reduceTasks, cFinishShuffle); @@ -195,11 +195,11 @@ <% Comparator<JobHistory.Task> cReduce = new Comparator<JobHistory.Task>(){ public int compare(JobHistory.Task t1, JobHistory.Task t2){ - Long l1 = new Long(t1.getLong(Keys.FINISH_TIME) - - t1.getLong(Keys.SHUFFLE_FINISHED)); - Long l2 = new Long(t2.getLong(Keys.FINISH_TIME) - - t2.getLong(Keys.SHUFFLE_FINISHED)); - return l2.compareTo(l1); + long l1 = t1.getLong(Keys.FINISH_TIME) - + t1.getLong(Keys.SHUFFLE_FINISHED); + long l2 = t2.getLong(Keys.FINISH_TIME) - + t2.getLong(Keys.SHUFFLE_FINISHED); + return (l2<l1 ? -1 : (l2==l1 ? 
0 : 1)); } }; Arrays.sort(reduceTasks, cReduce); Modified: lucene/hadoop/trunk/src/webapps/job/jobdetails.jsp URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/webapps/job/jobdetails.jsp?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/webapps/job/jobdetails.jsp (original) +++ lucene/hadoop/trunk/src/webapps/job/jobdetails.jsp Mon May 21 15:45:18 2007 @@ -56,16 +56,16 @@ killedTasks + "</td><td align=\"right\">" + ((failedTaskAttempts > 0) ? - new String("<a href=\"/jobfailures.jsp?jobid=" + jobId + - "&kind=" + kind + "&cause=failed\">" + failedTaskAttempts + - "</a>") : + ("<a href=\"/jobfailures.jsp?jobid=" + jobId + + "&kind=" + kind + "&cause=failed\">" + failedTaskAttempts + + "</a>") : "0" ) + " / " + ((killedTaskAttempts > 0) ? - new String("<a href=\"/jobfailures.jsp?jobid=" + jobId + - "&kind=" + kind + "&cause=killed\">" + killedTaskAttempts + - "</a>") : + ("<a href=\"/jobfailures.jsp?jobid=" + jobId + + "&kind=" + kind + "&cause=killed\">" + killedTaskAttempts + + "</a>") : "0" ) + "</td></tr>\n"); Modified: lucene/hadoop/trunk/src/webapps/job/jobdetailshistory.jsp URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/webapps/job/jobdetailshistory.jsp?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/webapps/job/jobdetailshistory.jsp (original) +++ lucene/hadoop/trunk/src/webapps/job/jobdetailshistory.jsp Mon May 21 15:45:18 2007 @@ -122,8 +122,9 @@ <table border="1"> <tr><td>Hostname</td><td>Failed Tasks</td></tr> <% - for( String node : badNodes.keySet() ) { - Set<String> failedTasks = badNodes.get(node); + for (Map.Entry<String, Set<String>> entry : badNodes.entrySet()) { + String node = entry.getKey(); + Set<String> failedTasks = entry.getValue(); %> <tr> <td><%=node %></td> Modified: lucene/hadoop/trunk/src/webapps/job/jobhistory.jsp URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/webapps/job/jobhistory.jsp?view=diff&rev=540336&r1=540335&r2=540336 ============================================================================== --- lucene/hadoop/trunk/src/webapps/job/jobhistory.jsp (original) +++ lucene/hadoop/trunk/src/webapps/job/jobhistory.jsp Mon May 21 15:45:18 2007 @@ -26,8 +26,10 @@ return ; } - for(String trackerStartTime : jobTrackerToJobs.keySet() ){ - Map<String, JobInfo> jobs = (Map<String, JobInfo>)jobTrackerToJobs.get(trackerStartTime) ; + for (Map.Entry<String, Map<String, JobInfo>> entry : + jobTrackerToJobs.entrySet()) { + String trackerStartTime = entry.getKey(); + Map<String, JobInfo> jobs = entry.getValue(); %> <h2>JobTracker started at : <%=new Date(Long.parseLong(trackerStartTime)) %></h2> <hr/>