Author: szetszwo
Date: Thu Jan 8 13:43:18 2009
New Revision: 732833
URL: http://svn.apache.org/viewvc?rev=732833&view=rev
Log:
HADOOP-4940. Remove a deprecated FileSystem.delete(Path f). (Enis Soztutar via
szetszwo)
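
Every call site below is updated with the same one-line substitution: pass an explicit recursive flag to delete. A minimal sketch of the migration, assuming a generic caller (the class and path are illustrative, not taken from the patch); passing true preserves the recursive behaviour most of the removed convenience overloads delegated to, while false refuses to remove a non-empty directory:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class DeleteMigrationSketch {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path out = new Path("/tmp/example-output");   // hypothetical path
        // Before HADOOP-4940: fs.delete(out);        // deprecated single-argument form, now removed
        fs.delete(out, true);                         // delete the path and, for a directory, its whole subtree
      }
    }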
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapred/GridMixRunner.java
hadoop/core/trunk/src/c++/libhdfs/hdfs.c
hadoop/core/trunk/src/c++/libhdfs/hdfs.h
hadoop/core/trunk/src/c++/libhdfs/hdfs_test.c
hadoop/core/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinJob.java
hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HDFSMerger.java
hadoop/core/trunk/src/contrib/fuse-dfs/src/fuse_trash.c
hadoop/core/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/lucene/FileSystemDirectory.java
hadoop/core/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/lucene/ShardWriter.java
hadoop/core/trunk/src/contrib/index/src/test/org/apache/hadoop/contrib/index/mapred/TestDistributionPolicy.java
hadoop/core/trunk/src/contrib/index/src/test/org/apache/hadoop/contrib/index/mapred/TestIndexUpdater.java
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/FilterFileSystem.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobConf.java
hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestLocalDirAllocator.java
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/lib/TestTotalOrderPartitioner.java
Modified: hadoop/core/trunk/CHANGES.txt
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Thu Jan 8 13:43:18 2009
@@ -13,6 +13,9 @@
HADOOP-4648. Remove obsolete, deprecated InMemoryFileSystem and
ChecksumDistributedFileSystem. (cdouglas via szetszwo)
+ HADOOP-4940. Remove a deprecated method FileSystem.delete(Path f). (Enis
+ Soztutar via szetszwo)
+
NEW FEATURES
HADOOP-4268. Change fsck to use ClientProtocol methods so that the
Modified:
hadoop/core/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapred/GridMixRunner.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapred/GridMixRunner.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapred/GridMixRunner.java
(original)
+++
hadoop/core/trunk/src/benchmarks/gridmix2/src/java/org/apache/hadoop/mapred/GridMixRunner.java
Thu Jan 8 13:43:18 2009
@@ -255,7 +255,7 @@
String[] args = sb.toString().split(" ");
try {
- fs.delete(outfile);
+ fs.delete(outfile, true);
} catch (IOException ex) {
System.out.println(ex.toString());
}
@@ -377,7 +377,7 @@
private static void clearDir(String dir) {
try {
Path outfile = new Path(dir);
- fs.delete(outfile);
+ fs.delete(outfile, true);
} catch (IOException ex) {
ex.printStackTrace();
System.out.println("delete file error:");
Modified: hadoop/core/trunk/src/c++/libhdfs/hdfs.c
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/c%2B%2B/libhdfs/hdfs.c?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/src/c++/libhdfs/hdfs.c (original)
+++ hadoop/core/trunk/src/c++/libhdfs/hdfs.c Thu Jan 8 13:43:18 2009
@@ -1090,7 +1090,7 @@
-int hdfsDelete(hdfsFS fs, const char* path)
+int hdfsDelete(hdfsFS fs, const char* path, int recursive)
{
// JAVA EQUIVALENT:
// File f = new File(path);
@@ -1114,9 +1114,10 @@
//Delete the file
jvalue jVal;
jthrowable jExc = NULL;
+ jboolean jRecursive = recursive;
if (invokeMethod(env, &jVal, &jExc, INSTANCE, jFS, HADOOP_FS,
- "delete", "(Lorg/apache/hadoop/fs/Path;)Z",
- jPath) != 0) {
+ "delete", "(Lorg/apache/hadoop/fs/Path;Z)Z",
+ jPath, jRecursive) != 0) {
errno = errnoFromException(jExc, env, "org.apache.hadoop.fs."
"FileSystem::delete");
return -1;
Modified: hadoop/core/trunk/src/c++/libhdfs/hdfs.h
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/c%2B%2B/libhdfs/hdfs.h?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/src/c++/libhdfs/hdfs.h (original)
+++ hadoop/core/trunk/src/c++/libhdfs/hdfs.h Thu Jan 8 13:43:18 2009
@@ -268,10 +268,12 @@
* hdfsDelete - Delete file.
* @param fs The configured filesystem handle.
* @param path The path of the file.
+ * @param recursive if path is a directory and set to
+ * non-zero, the directory is deleted; otherwise an exception is thrown.
+ * In the case of a file the recursive argument is irrelevant.
* @return Returns 0 on success, -1 on error.
*/
- int hdfsDelete(hdfsFS fs, const char* path);
-
+ int hdfsDelete(hdfsFS fs, const char* path, int recursive);
/**
* hdfsRename - Rename file.
Modified: hadoop/core/trunk/src/c++/libhdfs/hdfs_test.c
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/c%2B%2B/libhdfs/hdfs_test.c?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/src/c++/libhdfs/hdfs_test.c (original)
+++ hadoop/core/trunk/src/c++/libhdfs/hdfs_test.c Thu Jan 8 13:43:18 2009
@@ -305,13 +305,13 @@
hdfsFreeFileInfo(finfo, 1);
// Clean up
- fprintf(stderr, "hdfsDelete: %s\n", ((result = hdfsDelete(fs, newDirectory)) ? "Failed!" : "Success!"));
+ fprintf(stderr, "hdfsDelete: %s\n", ((result = hdfsDelete(fs, newDirectory, 1)) ? "Failed!" : "Success!"));
totalResult += result;
- fprintf(stderr, "hdfsDelete: %s\n", ((result = hdfsDelete(fs, srcPath)) ? "Failed!" : "Success!"));
+ fprintf(stderr, "hdfsDelete: %s\n", ((result = hdfsDelete(fs, srcPath, 1)) ? "Failed!" : "Success!"));
totalResult += result;
- fprintf(stderr, "hdfsDelete: %s\n", ((result = hdfsDelete(lfs, srcPath)) ? "Failed!" : "Success!"));
+ fprintf(stderr, "hdfsDelete: %s\n", ((result = hdfsDelete(lfs, srcPath, 1)) ? "Failed!" : "Success!"));
totalResult += result;
- fprintf(stderr, "hdfsDelete: %s\n", ((result = hdfsDelete(lfs, dstPath)) ? "Failed!" : "Success!"));
+ fprintf(stderr, "hdfsDelete: %s\n", ((result = hdfsDelete(lfs, dstPath, 1)) ? "Failed!" : "Success!"));
totalResult += result;
fprintf(stderr, "hdfsExists: %s\n", ((result = hdfsExists(fs, newDirectory)) ? "Success!" : "Failed!"));
totalResult += (result ? 0 : 1);
Modified:
hadoop/core/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinJob.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinJob.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinJob.java
(original)
+++
hadoop/core/trunk/src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/DataJoinJob.java
Thu Jan 8 13:43:18 2009
@@ -93,7 +93,7 @@
job.setJobName("DataJoinJob: " + jobName);
FileSystem fs = FileSystem.get(defaults);
- fs.delete(new Path(outputDir));
+ fs.delete(new Path(outputDir), true);
FileInputFormat.setInputPaths(job, inputDir);
job.setInputFormat(inputFormat);
Modified:
hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HDFSMerger.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HDFSMerger.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HDFSMerger.java
(original)
+++
hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HDFSMerger.java
Thu Jan 8 13:43:18 2009
@@ -105,7 +105,7 @@
for (FileStatus fstatus : inputFiles) {
appendFile(fstatus.getPath());
- hdfs.delete(fstatus.getPath());
+ hdfs.delete(fstatus.getPath(), true);
}
outputFile.close();
Modified: hadoop/core/trunk/src/contrib/fuse-dfs/src/fuse_trash.c
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/fuse-dfs/src/fuse_trash.c?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/fuse-dfs/src/fuse_trash.c (original)
+++ hadoop/core/trunk/src/contrib/fuse-dfs/src/fuse_trash.c Thu Jan 8 13:43:18
2009
@@ -116,7 +116,7 @@
return ret;
}
- if (hdfsDelete(userFS, path)) {
+ if (hdfsDelete(userFS, path, 1)) {
syslog(LOG_ERR,"ERROR: hdfs trying to delete the file %s",path);
return -EIO;
}
Modified:
hadoop/core/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/lucene/FileSystemDirectory.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/lucene/FileSystemDirectory.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/lucene/FileSystemDirectory.java
(original)
+++
hadoop/core/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/lucene/FileSystemDirectory.java
Thu Jan 8 13:43:18 2009
@@ -98,7 +98,7 @@
FileStatus[] fileStatus =
fs.listStatus(directory, LuceneIndexFileNameFilter.getFilter());
for (int i = 0; i < fileStatus.length; i++) {
- if (!fs.delete(fileStatus[i].getPath())) {
+ if (!fs.delete(fileStatus[i].getPath(), true)) {
throw new IOException("Cannot delete index file "
+ fileStatus[i].getPath());
}
@@ -150,7 +150,7 @@
* @see org.apache.lucene.store.Directory#deleteFile(java.lang.String)
*/
public void deleteFile(String name) throws IOException {
- if (!fs.delete(new Path(directory, name))) {
+ if (!fs.delete(new Path(directory, name), true)) {
throw new IOException("Cannot delete index file " + name);
}
}
@@ -167,7 +167,7 @@
*/
public IndexOutput createOutput(String name) throws IOException {
Path file = new Path(directory, name);
- if (fs.exists(file) && !fs.delete(file)) {
+ if (fs.exists(file) && !fs.delete(file, true)) {
// delete the existing one if applicable
throw new IOException("Cannot overwrite index file " + file);
}
Modified:
hadoop/core/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/lucene/ShardWriter.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/lucene/ShardWriter.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/lucene/ShardWriter.java
(original)
+++
hadoop/core/trunk/src/contrib/index/src/java/org/apache/hadoop/contrib/index/lucene/ShardWriter.java
Thu Jan 8 13:43:18 2009
@@ -176,7 +176,7 @@
for (int i = 0; i < fileStatus.length; i++) {
Path path = fileStatus[i].getPath();
if (startGen <
LuceneUtil.generationFromSegmentsFileName(path.getName())) {
- fs.delete(path);
+ fs.delete(path, true);
}
}
@@ -184,7 +184,7 @@
// but not segments.gen, and segments.gen will be overwritten anyway.
Path segmentsGenFile = new Path(LuceneUtil.IndexFileNames.SEGMENTS_GEN);
if (fs.exists(segmentsGenFile)) {
- fs.delete(segmentsGenFile);
+ fs.delete(segmentsGenFile, true);
}
}
@@ -226,7 +226,7 @@
}
} finally {
// finally delete the temp dir (files should have been deleted)
- localFs.delete(temp);
+ localFs.delete(temp, true);
}
}
Modified:
hadoop/core/trunk/src/contrib/index/src/test/org/apache/hadoop/contrib/index/mapred/TestDistributionPolicy.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/index/src/test/org/apache/hadoop/contrib/index/mapred/TestDistributionPolicy.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/contrib/index/src/test/org/apache/hadoop/contrib/index/mapred/TestDistributionPolicy.java
(original)
+++
hadoop/core/trunk/src/contrib/index/src/test/org/apache/hadoop/contrib/index/mapred/TestDistributionPolicy.java
Thu Jan 8 13:43:18 2009
@@ -88,21 +88,21 @@
fs = dfsCluster.getFileSystem();
if (fs.exists(inputPath)) {
- fs.delete(inputPath);
+ fs.delete(inputPath, true);
}
fs.copyFromLocalFile(localInputPath, inputPath);
if (fs.exists(updatePath)) {
- fs.delete(updatePath);
+ fs.delete(updatePath, true);
}
fs.copyFromLocalFile(localUpdatePath, updatePath);
if (fs.exists(outputPath)) {
// do not create, mapred will create
- fs.delete(outputPath);
+ fs.delete(outputPath, true);
}
if (fs.exists(indexPath)) {
- fs.delete(indexPath);
+ fs.delete(indexPath, true);
}
mrCluster =
@@ -156,7 +156,7 @@
onetest();
if (fs.exists(indexPath)) {
- fs.delete(indexPath);
+ fs.delete(indexPath, true);
}
// test round-robin distribution policy
@@ -177,7 +177,7 @@
}
if (fs.exists(outputPath)) {
- fs.delete(outputPath);
+ fs.delete(outputPath, true);
}
IIndexUpdater updater = new IndexUpdater();
@@ -185,7 +185,7 @@
shards);
if (fs.exists(outputPath)) {
- fs.delete(outputPath);
+ fs.delete(outputPath, true);
}
// delete docs w/ even docids, update docs w/ odd docids
Modified:
hadoop/core/trunk/src/contrib/index/src/test/org/apache/hadoop/contrib/index/mapred/TestIndexUpdater.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/index/src/test/org/apache/hadoop/contrib/index/mapred/TestIndexUpdater.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/contrib/index/src/test/org/apache/hadoop/contrib/index/mapred/TestIndexUpdater.java
(original)
+++
hadoop/core/trunk/src/contrib/index/src/test/org/apache/hadoop/contrib/index/mapred/TestIndexUpdater.java
Thu Jan 8 13:43:18 2009
@@ -88,17 +88,17 @@
fs = dfsCluster.getFileSystem();
if (fs.exists(inputPath)) {
- fs.delete(inputPath);
+ fs.delete(inputPath, true);
}
fs.copyFromLocalFile(localInputPath, inputPath);
if (fs.exists(outputPath)) {
// do not create, mapred will create
- fs.delete(outputPath);
+ fs.delete(outputPath, true);
}
if (fs.exists(indexPath)) {
- fs.delete(indexPath);
+ fs.delete(indexPath, true);
}
mrCluster =
@@ -157,7 +157,7 @@
for (int i = 0; i < numRuns; i++) {
if (fs.exists(outputPath)) {
- fs.delete(outputPath);
+ fs.delete(outputPath, true);
}
Shard[] shards = new Shard[initNumShards + i];
Modified:
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java
(original)
+++
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java
Thu Jan 8 13:43:18 2009
@@ -18,15 +18,22 @@
package org.apache.hadoop.streaming;
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
import junit.framework.TestCase;
-import java.io.*;
-import java.util.*;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.*;
import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hadoop.mapred.OutputLogFilter;
/**
* This test case tests the symlink creation
* utility provided by distributed caching
@@ -83,7 +90,7 @@
"-cacheFile", "hdfs://"+fileSys.getName()+CACHE_FILE_2 + "#" +
mapString2
};
- fileSys.delete(new Path(OUTPUT_DIR));
+ fileSys.delete(new Path(OUTPUT_DIR), true);
DataOutputStream file = fileSys.create(new Path(INPUT_FILE));
file.writeBytes(mapString + "\n");
Modified:
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java
(original)
+++
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java
Thu Jan 8 13:43:18 2009
@@ -31,10 +31,10 @@
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ToolRunner;
@@ -71,7 +71,7 @@
}
String createInputs(boolean tag) throws IOException {
- fs_.delete(new Path("/input/"));
+ fs_.delete(new Path("/input/"), true);
// i18n() replaces some ASCII with multibyte UTF-8 chars
addInput("/input/part-00", i18n("k1\tv1\n" + "k3\tv5\n"));
@@ -192,7 +192,7 @@
public void run() {
try {
in_ = connectInputStream();
- LineReader lineReader = new LineReader((InputStream)in_, conf_);
+ LineReader lineReader = new LineReader(in_, conf_);
Text line = new Text();
while (lineReader.readLine(line) > 0) {
buf_.append(line.toString());
Modified:
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
(original)
+++
hadoop/core/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
Thu Jan 8 13:43:18 2009
@@ -18,15 +18,22 @@
package org.apache.hadoop.streaming;
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
import junit.framework.TestCase;
-import java.io.*;
-import java.util.*;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.*;
import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hadoop.mapred.OutputLogFilter;
/**
* This test case tests the symlink creation
* utility provided by distributed caching
@@ -79,7 +86,7 @@
"-cacheFile", "hdfs://"+fileSys.getName()+CACHE_FILE + "#testlink"
};
- fileSys.delete(new Path(OUTPUT_DIR));
+ fileSys.delete(new Path(OUTPUT_DIR), true);
DataOutputStream file = fileSys.create(new Path(INPUT_FILE));
file.writeBytes(mapString);
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java Thu Jan 8
13:43:18 2009
@@ -561,10 +561,6 @@
*/
public abstract boolean rename(Path src, Path dst) throws IOException;
- /** Delete a file. */
- /** @deprecated Use delete(Path, boolean) instead */ @Deprecated
- public abstract boolean delete(Path f) throws IOException;
-
/** Delete a file.
*
* @param f the path to delete.
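
With the single-argument abstract method gone from FileSystem, concrete file systems only declare and implement the two-argument form. A hedged sketch of what a custom implementation looks like after this change; the wrapper class below is hypothetical and simply decorates an existing file system via FilterFileSystem:

    import java.io.IOException;

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FilterFileSystem;
    import org.apache.hadoop.fs.Path;

    // Hypothetical decorator: only delete(Path, boolean) needs overriding now.
    public class AuditingFileSystem extends FilterFileSystem {
      public AuditingFileSystem(FileSystem rawFs) {
        super(rawFs);
      }

      @Override
      public boolean delete(Path f, boolean recursive) throws IOException {
        System.err.println("delete " + f + " (recursive=" + recursive + ")");
        return super.delete(f, recursive);   // delegate to the wrapped file system
      }
    }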
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/FilterFileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/FilterFileSystem.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/FilterFileSystem.java
(original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/FilterFileSystem.java Thu
Jan 8 13:43:18 2009
@@ -138,11 +138,6 @@
return fs.rename(src, dst);
}
- /** Delete a file */@Deprecated
- public boolean delete(Path f) throws IOException {
- return delete(f, true);
- }
-
/** Delete a file */
public boolean delete(Path f, boolean recursive) throws IOException {
return fs.delete(f, recursive);
Modified:
hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java
(original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java Thu
Jan 8 13:43:18 2009
@@ -18,17 +18,23 @@
package org.apache.hadoop.fs;
-import java.io.*;
+import java.io.BufferedOutputStream;
+import java.io.DataOutput;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
import java.net.URI;
import java.nio.ByteBuffer;
-import java.nio.channels.FileLock;
-import java.util.*;
+import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.permission.*;
+import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
/****************************************************************
* Implement the FileSystem API for the raw local filesystem.
@@ -252,11 +258,6 @@
return FileUtil.copy(this, src, this, dst, true, getConf());
}
- @Deprecated
- public boolean delete(Path p) throws IOException {
- return delete(p, true);
- }
-
public boolean delete(Path p, boolean recursive) throws IOException {
File f = pathToFile(p);
if (f.isFile()) {
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java
(original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/ftp/FTPFileSystem.java Thu
Jan 8 13:43:18 2009
@@ -264,13 +264,6 @@
}
}
- /** @deprecated Use delete(Path, boolean) instead */
- @Override
- @Deprecated
- public boolean delete(Path file) throws IOException {
- return delete(file, false);
- }
-
@Override
public boolean delete(Path file, boolean recursive) throws IOException {
FTPClient client = connect();
Modified:
hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java
(original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/kfs/KosmosFileSystem.java
Thu Jan 8 13:43:18 2009
@@ -20,19 +20,20 @@
package org.apache.hadoop.fs.kfs;
-import java.io.*;
-import java.net.*;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.URI;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.fs.BlockLocation;
/**
* A FileSystem backed by KFS.
@@ -54,10 +55,12 @@
this.kfsImpl = fsimpl;
}
+ @Override
public URI getUri() {
return uri;
}
+ @Override
public void initialize(URI uri, Configuration conf) throws IOException {
try {
if (kfsImpl == null) {
@@ -82,15 +85,18 @@
}
}
+ @Override
@Deprecated
public String getName() {
return getUri().toString();
}
+ @Override
public Path getWorkingDirectory() {
return workingDir;
}
+ @Override
public void setWorkingDirectory(Path dir) {
workingDir = makeAbsolute(dir);
}
@@ -102,6 +108,7 @@
return new Path(workingDir, path);
}
+ @Override
public boolean mkdirs(Path path, FsPermission permission
) throws IOException {
Path absolute = makeAbsolute(path);
@@ -116,6 +123,7 @@
return res == 0;
}
+ @Override
@Deprecated
public boolean isDirectory(Path path) throws IOException {
Path absolute = makeAbsolute(path);
@@ -126,6 +134,7 @@
return kfsImpl.isDirectory(srep);
}
+ @Override
@Deprecated
public boolean isFile(Path path) throws IOException {
Path absolute = makeAbsolute(path);
@@ -133,6 +142,7 @@
return kfsImpl.isFile(srep);
}
+ @Override
public FileStatus[] listStatus(Path path) throws IOException {
Path absolute = makeAbsolute(path);
String srep = absolute.toUri().getPath();
@@ -143,6 +153,7 @@
return kfsImpl.readdirplus(absolute);
}
+ @Override
public FileStatus getFileStatus(Path path) throws IOException {
Path absolute = makeAbsolute(path);
String srep = absolute.toUri().getPath();
@@ -164,11 +175,13 @@
}
/** This optional operation is not yet supported. */
+ @Override
public FSDataOutputStream append(Path f, int bufferSize,
Progressable progress) throws IOException {
throw new IOException("Not supported");
}
+ @Override
public FSDataOutputStream create(Path file, FsPermission permission,
boolean overwrite, int bufferSize,
short replication, long blockSize,
Progressable progress)
@@ -176,7 +189,7 @@
if (exists(file)) {
if (overwrite) {
- delete(file);
+ delete(file, true);
} else {
throw new IOException("File already exists: " + file);
}
@@ -193,6 +206,7 @@
return kfsImpl.create(srep, replication, bufferSize);
}
+ @Override
public FSDataInputStream open(Path path, int bufferSize) throws
IOException {
if (!exists(path))
throw new IOException("File does not exist: " + path);
@@ -203,6 +217,7 @@
return kfsImpl.open(srep, bufferSize);
}
+ @Override
public boolean rename(Path src, Path dst) throws IOException {
Path absoluteS = makeAbsolute(src);
String srepS = absoluteS.toUri().getPath();
@@ -215,6 +230,7 @@
}
// recursively delete the directory and its contents
+ @Override
public boolean delete(Path path, boolean recursive) throws IOException {
Path absolute = makeAbsolute(path);
String srep = absolute.toUri().getPath();
@@ -235,15 +251,12 @@
return kfsImpl.rmdir(srep) == 0;
}
- @Deprecated
- public boolean delete(Path path) throws IOException {
- return delete(path, true);
- }
-
+ @Override
public short getDefaultReplication() {
return 3;
}
+ @Override
public boolean setReplication(Path path, short replication)
throws IOException {
@@ -256,6 +269,7 @@
// 64MB is the KFS block size
+ @Override
public long getDefaultBlockSize() {
return 1 << 26;
}
@@ -296,19 +310,23 @@
return result;
}
+ @Override
public void copyFromLocalFile(boolean delSrc, Path src, Path dst) throws
IOException {
FileUtil.copy(localFs, src, this, dst, delSrc, getConf());
}
+ @Override
public void copyToLocalFile(boolean delSrc, Path src, Path dst) throws
IOException {
FileUtil.copy(this, src, localFs, dst, delSrc, getConf());
}
+ @Override
public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
throws IOException {
return tmpLocalFile;
}
+ @Override
public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
throws IOException {
moveFromLocalFile(tmpLocalFile, fsOutputFile);
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java
(original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3/S3FileSystem.java Thu
Jan 8 13:43:18 2009
@@ -212,7 +212,7 @@
INode inode = store.retrieveINode(makeAbsolute(file));
if (inode != null) {
if (overwrite) {
- delete(file);
+ delete(file, true);
} else {
throw new IOException("File already exists: " + file);
}
@@ -317,12 +317,6 @@
return true;
}
- @Override
- @Deprecated
- public boolean delete(Path path) throws IOException {
- return delete(path, true);
- }
-
/**
* FileStatus for S3 file systems.
*/
Modified:
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
(original)
+++
hadoop/core/trunk/src/core/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
Thu Jan 8 13:43:18 2009
@@ -280,12 +280,6 @@
}
@Override
- @Deprecated
- public boolean delete(Path path) throws IOException {
- return delete(path, true);
- }
-
- @Override
public boolean delete(Path f, boolean recursive) throws IOException {
FileStatus status;
try {
Modified:
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
(original)
+++
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
Thu Jan 8 13:43:18 2009
@@ -211,14 +211,6 @@
}
/**
- * Get rid of Path f, whether a true file or dir.
- */
- @Deprecated
- public boolean delete(Path f) throws IOException {
- return dfs.delete(getPathName(f));
- }
-
- /**
* requires a boolean check to delete a non
* empty directory recursively.
*/
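
Because the convenience overload is gone, HDFS callers now have to say whether a directory delete may recurse. A small defensive sketch, assuming the usual semantics that delete(f, false) refuses to remove a non-empty directory (the helper method and message are illustrative):

    import java.io.IOException;

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class SafeDelete {
      // Deletes files and empty directories only; reports non-empty directories instead of recursing.
      public static boolean deleteNonRecursive(FileSystem fs, Path p) {
        try {
          return fs.delete(p, false);
        } catch (IOException notEmpty) {
          System.err.println("not deleting " + p + ": " + notEmpty.getMessage()
              + " (call fs.delete(p, true) to remove the subtree)");
          return false;
        }
      }
    }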
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
(original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java Thu
Jan 8 13:43:18 2009
@@ -328,15 +328,6 @@
throw new IOException("Not supported");
}
- @Override
- /*
- * @deprecated Use delete(path, boolean)
- */
- @Deprecated
- public boolean delete(Path f) throws IOException {
- throw new IOException("Not supported");
- }
-
@Override
public boolean delete(Path f, boolean recursive) throws IOException {
throw new IOException("Not supported");
Modified:
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
(original)
+++
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
Thu Jan 8 13:43:18 2009
@@ -17,15 +17,16 @@
*/
package org.apache.hadoop.hdfs.protocol;
-import java.io.*;
+import java.io.FileNotFoundException;
+import java.io.IOException;
-import org.apache.hadoop.ipc.VersionedProtocol;
-import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.hdfs.protocol.FSConstants.UpgradeAction;
-import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
-import org.apache.hadoop.fs.permission.*;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.protocol.FSConstants.UpgradeAction;
+import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
+import org.apache.hadoop.ipc.VersionedProtocol;
+import org.apache.hadoop.security.AccessControlException;
/**********************************************************************
* ClientProtocol is used by user code via
@@ -78,7 +79,7 @@
* The name-node does not have a notion of "current" directory for a client.
* <p>
* Once created, the file is visible and available for read to other clients.
- * Although, other clients cannot {@link #delete(String)}, re-create or
+ * Although, other clients cannot {@link #delete(String, boolean)}, re-create or
* {@link #rename(String, String)} it until the file is completed
* or explicitly as a result of lease expiration.
* <p>
@@ -220,7 +221,9 @@
* @param src existing name.
* @return true only if the existing file or directory was actually removed
* from the file system.
+ * @deprecated use {@link #delete(String, boolean)} instead.
*/
+ @Deprecated
public boolean delete(String src) throws IOException;
/**
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobConf.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobConf.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobConf.java
(original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobConf.java Thu Jan
8 13:43:18 2009
@@ -326,14 +326,14 @@
public void deleteLocalFiles() throws IOException {
String[] localDirs = getLocalDirs();
for (int i = 0; i < localDirs.length; i++) {
- FileSystem.getLocal(this).delete(new Path(localDirs[i]));
+ FileSystem.getLocal(this).delete(new Path(localDirs[i]), true);
}
}
public void deleteLocalFiles(String subdir) throws IOException {
String[] localDirs = getLocalDirs();
for (int i = 0; i < localDirs.length; i++) {
- FileSystem.getLocal(this).delete(new Path(localDirs[i], subdir));
+ FileSystem.getLocal(this).delete(new Path(localDirs[i], subdir), true);
}
}
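
JobConf now wipes each configured local directory with the recursive form, since job and task subdirectories live beneath them. A usage sketch against the local file system (the directory names stand in for mapred.local.dir entries and are hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class LocalCleanupSketch {
      public static void main(String[] args) throws Exception {
        FileSystem localFs = FileSystem.getLocal(new Configuration());
        String[] localDirs = { "/tmp/mapred/local0", "/tmp/mapred/local1" };  // hypothetical dirs
        for (String dir : localDirs) {
          localFs.delete(new Path(dir, "taskTracker"), true);  // removes the whole subtree if present
        }
      }
    }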
Modified:
hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestLocalDirAllocator.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestLocalDirAllocator.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestLocalDirAllocator.java
(original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestLocalDirAllocator.java
Thu Jan 8 13:43:18 2009
@@ -66,7 +66,7 @@
private static void rmBufferDirs() throws IOException {
assertTrue(!localFs.exists(BUFFER_PATH_ROOT) ||
- localFs.delete(BUFFER_PATH_ROOT));
+ localFs.delete(BUFFER_PATH_ROOT, true));
}
private void validateTempDirCreation(int i) throws IOException {
Modified:
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/lib/TestTotalOrderPartitioner.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/lib/TestTotalOrderPartitioner.java?rev=732833&r1=732832&r2=732833&view=diff
==============================================================================
---
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/lib/TestTotalOrderPartitioner.java
(original)
+++
hadoop/core/trunk/src/test/org/apache/hadoop/mapred/lib/TestTotalOrderPartitioner.java
Thu Jan 8 13:43:18 2009
@@ -22,10 +22,7 @@
import java.util.ArrayList;
import java.util.Arrays;
-import junit.framework.Test;
import junit.framework.TestCase;
-import junit.framework.TestSuite;
-import junit.extensions.TestSetup;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -117,7 +114,7 @@
partitioner.getPartition(chk.data, nw, splitStrings.length + 1));
}
} finally {
- p.getFileSystem(job).delete(p);
+ p.getFileSystem(job).delete(p, true);
}
}
@@ -137,7 +134,7 @@
partitioner.getPartition(chk.data, nw, splitStrings.length + 1));
}
} finally {
- p.getFileSystem(job).delete(p);
+ p.getFileSystem(job).delete(p, true);
}
}
@@ -184,7 +181,7 @@
partitioner.getPartition(chk.data, nw, splitStrings.length + 1));
}
} finally {
- p.getFileSystem(job).delete(p);
+ p.getFileSystem(job).delete(p, true);
}
}