import org.apache.hadoop.fs.FileSystem; 
import org.apache.hadoop.fs.FileStatus; 
import org.apache.hadoop.fs.Path; 
import org.apache.hadoop.fs.FSDataOutputStream; 
import org.apache.hadoop.fs.FSDataInputStream; 
import org.apache.hadoop.conf.Configuration;

import java.io.File;
import java.io.RandomAccessFile;


import java.io.IOException;

/**
 * Small demo that opens a local file with {@link RandomAccessFile} and an HDFS
 * file through the Hadoop {@code FileSystem} API, printing a diagnostic if
 * either open fails.
 *
 * <p>Fixes over the previous revision:
 * <ul>
 *   <li>Both {@code finally} blocks called {@code close()} on a reference that
 *       was still {@code null} when the constructor threw, turning every open
 *       failure into a {@code NullPointerException}. Replaced with
 *       try-with-resources.</li>
 *   <li>The HDFS path was opened with {@code java.io.RandomAccessFile}, which
 *       only understands local-filesystem paths; HDFS data must be read via
 *       {@code FileSystem.open()}.</li>
 *   <li>{@code new File(hdfsPath.toString(), "r")} misused the
 *       {@code File(parent, child)} constructor ("r" became a child path
 *       component, not a mode) and the result was never used; removed.</li>
 * </ul>
 */
public class HDFSReadWrite {

    /**
     * Entry point: attempts to open one local file and one HDFS file read-only.
     *
     * @param args unused
     * @throws IOException if the FileSystem cannot be obtained or closed
     */
    public static void main(String[] args) throws IOException {
        // Default Configuration picks up core-site.xml etc. from the classpath.
        FileSystem hdfs = FileSystem.get(new Configuration());

        Path hdfsPath = new Path(hdfs.getWorkingDirectory() + "/" + "netcdf_input" + "/" + "file1.nc");
        Path localPath = new Path("/tmp/foo.txt");

        // Local file: RandomAccessFile is appropriate. try-with-resources closes
        // it on every path and never calls close() on a null reference.
        System.out.println("Opening local RandomAccessFile:" + localPath.toString());
        try (RandomAccessFile localRaf = new RandomAccessFile(localPath.toString(), "r")) {
            // Opened successfully; this demo only verifies the open succeeds.
        } catch (IOException ioe) {
            System.out.println("failed trying to open " + localPath);
            ioe.printStackTrace();
        }

        // HDFS file: must go through the FileSystem API — RandomAccessFile
        // cannot address paths on a distributed filesystem.
        System.out.println("Opening remote FSDataInputStream:" + hdfsPath.toString());
        try (FSDataInputStream in = hdfs.open(hdfsPath)) {
            // Opened successfully; this demo only verifies the open succeeds.
        } catch (IOException ioe) {
            System.out.println("failed trying to open " + hdfsPath);
            ioe.printStackTrace();
        }

        hdfs.close();
    }

}
