Of course you can.
You can find examples in the org.apache.hadoop.io.IOUtils class.
Here is my example.
package org.myorg;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import javax.swing.text.html.HTMLDocument.HTMLReader.IsindexAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.dfs.DistributedFileSystem;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.ToolRunner;
public class FsTest extends Configured implements
org.apache.hadoop.util.Tool {
FileSystem fs;
public FsTest() throws IOException {
super(new Configuration());
System.out.println(getConf().get("fs.default.name",
"file:///"));
fs = FileSystem.get(getConf());
System.out.println(fs instanceof DistributedFileSystem);
}
public int run(String[] args) {
try{
System.out.println(fs.getContentLength(new
Path("/data/xyz")));
// showDfsFile("/data/xyz");
}catch(Exception e){
e.printStackTrace();
}
return 0;
}
public void showDfsFile(String dfsFilePath){
try {
InputStream in = fs.open(new Path("/data/xyz"));//the file in your
hdfs
// IOUtils.copyBytes(in, System.out, getConf());
copy(in, System.out);
} catch (IOException e) {
e.printStackTrace();
}
}
public void copy(InputStream in, OutputStream out)throws IOException{
byte[] buffer = new byte[1024];
try{
int bytesRead = in.read(buffer);
while(bytesRead >= 0){
out.write(buffer, 0, bytesRead);
bytesRead = in.read(buffer);
}
}finally{
in.close();
out.close();
}
}
public void close() throws IOException{
if (fs != null){
fs.close();
fs = null;
}
}
public static void main(String[] args) throws Exception{
System.out.println("begin:");
int res = 0;
FsTest testor = null;
try{
testor = new FsTest();
res = ToolRunner.run(testor, args);
}finally{
if (testor != null){
testor.close();
}
}
System.exit(res);
}
}
On 2008-7-1, at 6:48 AM, <[EMAIL PROTECTED]> wrote:
Hi everybody,
I'm trying to access the hdfs using web services. The idea is that the
web service client can access the HDFS using SOAP or REST and has to
support all the hdfs shell commands.
Is there any existing work on this?
I really appreciate any feedback,
Xavier