import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.hive.ql.io.orc.*;
import org.apache.hadoop.hive.ql.io.orc.OrcSerde.*;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;

public class orcw {

    private static Configuration conf = new Configuration();
    // Kept public static for backward compatibility with any external readers of this field.
    public static Writer writer;

    /**
     * Minimal mutable row holder; exposed via reflection to the ORC
     * ObjectInspector so each element of {@link #columns} becomes a column.
     */
    public static class OrcRow {
        public Object[] columns;

        OrcRow(int colCount) {
            columns = new Object[colCount];
        }

        void setFieldValue(int fieldIndex, Object value) {
            columns[fieldIndex] = value;
        }
    }

    /**
     * Writes a single three-column row ({@code Text} values) to an ORC file
     * at {@code /tmp/orcfile1}, then closes the writer.
     *
     * @param args unused
     * @throws IOException never in practice (all exceptions are caught and
     *         logged), declared for signature compatibility
     */
    public static void main(String[] args) throws IOException {
        String path = "/tmp/orcfile1";
        try {
            // Reflection-based inspector: maps OrcRow's public fields to ORC columns.
            ObjectInspector inspector = ObjectInspectorFactory.getReflectionObjectInspector(
                    OrcRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);

            writer = OrcFile.createWriter(new Path(path),
                    OrcFile.writerOptions(conf)
                            .inspector(inspector)
                            .stripeSize(100000)
                            .bufferSize(10000)
                            .compress(CompressionKind.ZLIB)
                            .version(OrcFile.Version.V_0_12));

            OrcRow orcRecord = new OrcRow(3);
            orcRecord.setFieldValue(0, new Text("1"));
            orcRecord.setFieldValue(1, new Text("hello"));
            orcRecord.setFieldValue(2, new Text("orcFile"));
            writer.addRow(orcRecord);
        } catch (Exception e) {
            // Demo-style reporting; a real application would propagate or log properly.
            e.printStackTrace();
        } finally {
            // Close in finally so the writer (and its output stream) is released
            // even when addRow/createWriter throws — previously it leaked on error.
            if (writer != null) {
                try {
                    writer.close();
                } catch (IOException ignored) {
                    // Best-effort close; the primary failure (if any) was already reported.
                }
            }
        }
    }
}
