Author: dhruba
Date: Tue Jul 31 12:12:59 2007
New Revision: 561443

URL: http://svn.apache.org/viewvc?view=rev&rev=561443
Log:
HADOOP-1647.  FileSystem.getFileStatus returns valid values for "/".
(Dhruba Borthakur via dhruba)
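For context, a minimal sketch of the client-side behavior this change guarantees: after the fix, FileSystem.getFileStatus on the root path "/" reports a valid directory status. The cluster setup mirrors the new TestFileStatus test below; the class name RootStatusSketch and the standalone main() are illustrative assumptions, not part of this commit.

    package org.apache.hadoop.dfs;   // same package as the new test, so MiniDFSCluster resolves

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class RootStatusSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Single-datanode in-process cluster, same constructor the new test uses.
        MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
        FileSystem fs = cluster.getFileSystem();
        try {
          Path root = new Path("/");
          // With HADOOP-1647 applied, the root path reports a valid status.
          System.out.println("isDirectory(\"/\")           : " + fs.isDirectory(root));
          System.out.println("getFileStatus(\"/\").isDir() : " + fs.getFileStatus(root).isDir());
        } finally {
          fs.close();
          cluster.shutdown();
        }
      }
    }
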
Added:
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestFileStatus.java   (with props)
Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=561443&r1=561442&r2=561443
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Tue Jul 31 12:12:59 2007
@@ -427,6 +427,9 @@
 141. HADOOP-1551.  libhdfs supports setting replication factor and
      retrieving modification time of files.  (Sameer Paranjpye via dhruba)
 
+141. HADOOP-1647.  FileSystem.getFileStatus returns valid values for "/".
+     (Dhruba Borthakur via dhruba)
+
 Release 0.13.0 - 2007-06-08

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java?view=diff&rev=561443&r1=561442&r2=561443
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDirectory.java Tue Jul 31 12:12:59 2007
@@ -110,6 +110,9 @@
      * @return the string representation of the absolute path of this file
      */
     String getAbsoluteName() {
+      if (this.parent == null) {
+        return Path.SEPARATOR;       // root directory is "/"
+      }
       return internalGetAbsolutePathName().toString();
     }
 
@@ -301,8 +304,6 @@
       return total + 1;
     }
 
-    /**
-     */
     long computeFileLength() {
       long total = 0;
       if (blocks != null) {

Added: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestFileStatus.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestFileStatus.java?view=auto&rev=561443
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestFileStatus.java (added)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestFileStatus.java Tue Jul 31 12:12:59 2007
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.dfs;
+
+import junit.framework.TestCase;
+import java.io.*;
+import java.util.Random;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.fs.FSDataOutputStream;
+
+/**
+ * This class tests the FileStatus API.
+ */
+public class TestFileStatus extends TestCase {
+  static final long seed = 0xDEADBEEFL;
+  static final int blockSize = 8192;
+  static final int fileSize = 16384;
+
+  private static String TEST_ROOT_DIR =
+    new Path(System.getProperty("test.build.data","/tmp"))
+    .toString().replace(' ', '+');
+
+  private void writeFile(FileSystem fileSys, Path name, int repl)
+    throws IOException {
+    // create and write a file that contains two blocks of data
+    FSDataOutputStream stm = fileSys.create(name, true,
+                                            fileSys.getConf().getInt("io.file.buffer.size", 4096),
+                                            (short)repl, (long)blockSize);
+    byte[] buffer = new byte[fileSize];
+    Random rand = new Random(seed);
+    rand.nextBytes(buffer);
+    stm.write(buffer);
+    stm.close();
+  }
+
+  private void checkFile(FileSystem fileSys, Path name, int repl)
+    throws IOException {
+    // wait until every block of the file reports at least repl replicas
+    boolean done = false;
+    while (!done) {
+      try {
+        Thread.sleep(1000);
+      } catch (InterruptedException e) {}
+      done = true;
+      String[][] locations = fileSys.getFileCacheHints(name, 0, fileSize);
+      for (int idx = 0; idx < locations.length; idx++) {
+        if (locations[idx].length < repl) {
+          done = false;
+          break;
+        }
+      }
+    }
+  }
+
+  /**
+   * Tests the FileStatus API on the root directory, a file, and a directory.
+   */
+  public void testFileStatus() throws IOException {
+    Configuration conf = new Configuration();
+    MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
+    FileSystem fs = cluster.getFileSystem();
+    try {
+
+      //
+      // check that / exists
+      //
+      Path path = new Path("/");
+      System.out.println("Path : \"" + path.toString() + "\"");
+      System.out.println(fs.isDirectory(path));
+      System.out.println(fs.getFileStatus(path).isDir());
+      assertTrue("/ should be a directory",
+                 fs.getFileStatus(path).isDir() == true);
+
+      // create a file in home directory
+      //
+      Path file1 = new Path("filestatus.dat");
+      writeFile(fs, file1, 1);
+      System.out.println("Created file filestatus.dat with one replica.");
+      checkFile(fs, file1, 1);
+      assertTrue(file1 + " should be a file",
+                 fs.getFileStatus(file1).isDir() == false);
+      System.out.println("Path : \"" + file1 + "\"");
+
+      // create a directory
+      //
+      Path dir = new Path("/test/mkdirs");
+      assertTrue(fs.mkdirs(dir));
+      assertTrue(fs.exists(dir));
+      assertTrue(dir + " should be a directory",
+                 fs.getFileStatus(dir).isDir() == true);
+      System.out.println("Dir : \"" + dir + "\"");
+
+    } finally {
+      fs.close();
+      cluster.shutdown();
+    }
+  }
+}

Propchange: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestFileStatus.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestFileStatus.java
------------------------------------------------------------------------------
    svn:keywords = Id Revision HeadURL
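
As a reading aid for the FSDirectory hunk above, here is a simplified, hypothetical model of why the parent == null check maps the root to Path.SEPARATOR. SketchINode is an invented name for this note; it deliberately omits the real FSDirectory.INode state and the internalGetAbsolutePathName() machinery.

    // Simplified model: only the root has no parent, so its absolute name is "/".
    class SketchINode {
      final String name;         // "" for the root in this model
      final SketchINode parent;  // null only for the root

      SketchINode(String name, SketchINode parent) {
        this.name = name;
        this.parent = parent;
      }

      String getAbsoluteName() {
        if (parent == null) {
          return "/";            // root directory is "/", as in the fix above
        }
        String prefix = parent.getAbsoluteName();
        return prefix.equals("/") ? prefix + name : prefix + "/" + name;
      }

      public static void main(String[] args) {
        SketchINode root = new SketchINode("", null);
        SketchINode usr  = new SketchINode("usr", root);
        System.out.println(root.getAbsoluteName());  // prints "/"
        System.out.println(usr.getAbsoluteName());   // prints "/usr"
      }
    }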