Author: tomwhite Date: Tue Apr 17 04:35:50 2007 New Revision: 529570 URL: http://svn.apache.org/viewvc?view=rev&rev=529570 Log: HADOOP-1190. Fix unchecked warnings in fs and io packages.
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/InMemoryFileSystem.java lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableFactories.java lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/InMemoryFileSystem.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/InMemoryFileSystem.java?view=diff&rev=529570&r1=529569&r2=529570 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/InMemoryFileSystem.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/InMemoryFileSystem.java Tue Apr 17 04:35:50 2007 @@ -44,13 +44,15 @@ private Path staticWorkingDir; //pathToFileAttribs is the final place where a file is put after it is closed - private Map <String, FileAttributes> pathToFileAttribs = new HashMap(); + private Map<String, FileAttributes> pathToFileAttribs = + new HashMap<String, FileAttributes>(); //tempFileAttribs is a temp place which is updated while reserving memory for //files we are going to create. It is read in the createRaw method and the //temp key/value is discarded. If the file makes it to "close", then it //ends up being in the pathToFileAttribs map. 
- private Map <String, FileAttributes> tempFileAttribs = new HashMap(); + private Map<String, FileAttributes> tempFileAttribs = + new HashMap<String, FileAttributes>(); public RawInMemoryFileSystem() { setConf(new Configuration()); Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java?view=diff&rev=529570&r1=529569&r2=529570 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/RawLocalFileSystem.java Tue Apr 17 04:35:50 2007 @@ -36,9 +36,11 @@ static final URI NAME = URI.create("file:///"); private Path workingDir = new Path(System.getProperty("user.dir")); - TreeMap sharedLockDataSet = new TreeMap(); - TreeMap nonsharedLockDataSet = new TreeMap(); - TreeMap lockObjSet = new TreeMap(); + TreeMap<File, FileInputStream> sharedLockDataSet = + new TreeMap<File, FileInputStream>(); + TreeMap<File, FileOutputStream> nonsharedLockDataSet = + new TreeMap<File, FileOutputStream>(); + TreeMap<File, FileLock> lockObjSet = new TreeMap<File, FileLock>(); // by default use copy/delete instead of rename boolean useCopyForRename = true; @@ -308,9 +310,9 @@ FileInputStream sharedLockData; FileOutputStream nonsharedLockData; synchronized (this) { - lockObj = (FileLock) lockObjSet.remove(f); - sharedLockData = (FileInputStream) sharedLockDataSet.remove(f); - nonsharedLockData = (FileOutputStream) nonsharedLockDataSet.remove(f); + lockObj = lockObjSet.remove(f); + sharedLockData = sharedLockDataSet.remove(f); + nonsharedLockData = nonsharedLockDataSet.remove(f); } if (lockObj == null) { Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java URL: 
http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java?view=diff&rev=529570&r1=529569&r2=529570 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/ObjectWritable.java Tue Apr 17 04:35:50 2007 @@ -65,7 +65,7 @@ writeObject(out, instance, declaredClass, conf); } - private static final Map PRIMITIVE_NAMES = new HashMap(); + private static final Map<String, Class<?>> PRIMITIVE_NAMES = new HashMap<String, Class<?>>(); static { PRIMITIVE_NAMES.put("boolean", Boolean.TYPE); PRIMITIVE_NAMES.put("byte", Byte.TYPE); @@ -79,7 +79,7 @@ } private static class NullInstance extends Configured implements Writable { - private Class declaredClass; + private Class<?> declaredClass; public NullInstance() { super(null); } public NullInstance(Class declaredClass, Configuration conf) { super(conf); @@ -87,7 +87,7 @@ } public void readFields(DataInput in) throws IOException { String className = UTF8.readString(in); - declaredClass = (Class)PRIMITIVE_NAMES.get(className); + declaredClass = PRIMITIVE_NAMES.get(className); if (declaredClass == null) { try { declaredClass = getConf().getClassByName(className); @@ -168,10 +168,11 @@ /** Read a {@link Writable}, {@link String}, primitive type, or an array of * the preceding. 
*/ + @SuppressWarnings("unchecked") public static Object readObject(DataInput in, ObjectWritable objectWritable, Configuration conf) throws IOException { String className = UTF8.readString(in); - Class declaredClass = (Class)PRIMITIVE_NAMES.get(className); + Class<?> declaredClass = PRIMITIVE_NAMES.get(className); if (declaredClass == null) { try { declaredClass = conf.getClassByName(className); @@ -216,7 +217,7 @@ } else if (declaredClass == String.class) { // String instance = UTF8.readString(in); } else if( declaredClass.isEnum() ) { // enum - instance = Enum.valueOf( declaredClass, UTF8.readString(in) ); + instance = Enum.valueOf( (Class<? extends Enum>) declaredClass, UTF8.readString(in) ); } else { // Writable Class instanceClass = null; try { Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java?view=diff&rev=529570&r1=529569&r2=529570 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java Tue Apr 17 04:35:50 2007 @@ -530,9 +530,10 @@ public void write(DataOutput out) throws IOException { out.writeInt(this.theMetadata.size()); - Iterator iter = this.theMetadata.entrySet().iterator(); + Iterator<Map.Entry<Text, Text>> iter = + this.theMetadata.entrySet().iterator(); while (iter.hasNext()) { - Map.Entry<Text, Text> en = (Map.Entry<Text, Text>)iter.next(); + Map.Entry<Text, Text> en = iter.next(); en.getKey().write(out); en.getValue().write(out); } @@ -556,11 +557,13 @@ if (this.theMetadata.size() != other.theMetadata.size()) { return false; } - Iterator iter1 = this.theMetadata.entrySet().iterator(); - Iterator iter2 = other.theMetadata.entrySet().iterator(); + Iterator<Map.Entry<Text, Text>> iter1 = + this.theMetadata.entrySet().iterator(); + 
Iterator<Map.Entry<Text, Text>> iter2 = + other.theMetadata.entrySet().iterator(); while (iter1.hasNext() && iter2.hasNext()) { - Map.Entry<Text, Text> en1 = (Map.Entry<Text, Text>)iter1.next(); - Map.Entry<Text, Text> en2 = (Map.Entry<Text, Text>)iter2.next(); + Map.Entry<Text, Text> en1 = iter1.next(); + Map.Entry<Text, Text> en2 = iter2.next(); if (!en1.getKey().equals(en2.getKey())) { return false; } @@ -577,9 +580,10 @@ public String toString() { StringBuffer sb = new StringBuffer(); sb.append("size: ").append(this.theMetadata.size()).append("\n"); - Iterator iter = this.theMetadata.entrySet().iterator(); + Iterator<Map.Entry<Text, Text>> iter = + this.theMetadata.entrySet().iterator(); while (iter.hasNext()) { - Map.Entry<Text, Text> en = (Map.Entry<Text, Text>)iter.next(); + Map.Entry<Text, Text> en = iter.next(); sb.append("\t").append(en.getKey().toString()).append("\t").append(en.getValue().toString()); sb.append("\n"); } @@ -2293,7 +2297,8 @@ //a TreeMap used to store the segments sorted by size (segment offset and //segment path name is used to break ties between segments of same sizes) - private Map <SegmentDescriptor, Void> sortedSegmentSizes = new TreeMap(); + private Map<SegmentDescriptor, Void> sortedSegmentSizes = + new TreeMap<SegmentDescriptor, Void>(); public void put(SegmentDescriptor stream) throws IOException { if (size() == 0) { @@ -2381,7 +2386,8 @@ do { //get the factor for this pass of merge factor = getPassFactor(passNo, numSegments); - List <SegmentDescriptor> segmentsToMerge = new ArrayList(); + List<SegmentDescriptor> segmentsToMerge = + new ArrayList<SegmentDescriptor>(); int segmentsConsidered = 0; int numSegmentsToConsider = factor; while (true) { Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java?view=diff&rev=529570&r1=529569&r2=529570 
============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableComparator.java Tue Apr 17 04:35:50 2007 @@ -32,11 +32,12 @@ */ public class WritableComparator implements Comparator { - private static HashMap comparators = new HashMap(); // registry + private static HashMap<Class, WritableComparator> comparators = + new HashMap<Class, WritableComparator>(); // registry /** Get a comparator for a {@link WritableComparable} implementation. */ public static synchronized WritableComparator get(Class c) { - WritableComparator comparator = (WritableComparator)comparators.get(c); + WritableComparator comparator = comparators.get(c); if (comparator == null) comparator = new WritableComparator(c); return comparator; @@ -103,6 +104,7 @@ * * <p> The default implementation uses the natural ordering, calling {@link * Comparable#compareTo(Object)}. */ + @SuppressWarnings("unchecked") public int compare(WritableComparable a, WritableComparable b) { return a.compareTo(b); } Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableFactories.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableFactories.java?view=diff&rev=529570&r1=529569&r2=529570 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableFactories.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableFactories.java Tue Apr 17 04:35:50 2007 @@ -25,7 +25,8 @@ /** Factories for non-public writables. Defining a factory permits {@link * ObjectWritable} to be able to construct instances of non-public classes. 
*/ public class WritableFactories { - private static final HashMap CLASS_TO_FACTORY = new HashMap(); + private static final HashMap<Class, WritableFactory> CLASS_TO_FACTORY = + new HashMap<Class, WritableFactory>(); private WritableFactories() {} // singleton @@ -36,7 +37,7 @@ /** Define a factory for a class. */ public static synchronized WritableFactory getFactory(Class c) { - return (WritableFactory)CLASS_TO_FACTORY.get(c); + return CLASS_TO_FACTORY.get(c); } /** Create a new instance of a class with a defined factory. */ Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java?view=diff&rev=529570&r1=529569&r2=529570 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableName.java Tue Apr 17 04:35:50 2007 @@ -28,8 +28,10 @@ * @author Doug Cutting */ public class WritableName { - private static HashMap NAME_TO_CLASS = new HashMap(); - private static HashMap CLASS_TO_NAME = new HashMap(); + private static HashMap<String, Class> NAME_TO_CLASS = + new HashMap<String, Class>(); + private static HashMap<Class, String> CLASS_TO_NAME = + new HashMap<Class, String>(); static { // define important types WritableName.setName(NullWritable.class, "null"); @@ -54,7 +56,7 @@ /** Return the name for a class. Default is {@link Class#getName()}. 
*/ public static synchronized String getName(Class writableClass) { - String name = (String)CLASS_TO_NAME.get(writableClass); + String name = CLASS_TO_NAME.get(writableClass); if (name != null) return name; return writableClass.getName(); @@ -64,7 +66,7 @@ public static synchronized Class getClass(String name, Configuration conf ) throws IOException { - Class writableClass = (Class)NAME_TO_CLASS.get(name); + Class writableClass = NAME_TO_CLASS.get(name); if (writableClass != null) return writableClass; try {