import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.hive.ql.io.orc.*;
import org.apache.hadoop.hive.ql.io.orc.OrcSerde.*;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;

import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;

public class orcrd {

	private static Configuration conf = new Configuration();
	public static Reader reader;

	/**
	 * Demo driver: opens the ORC file at {@code /tmp/orcfile1} on the local
	 * filesystem and prints its metadata (compression kind and size, row
	 * count, content length), the column names and ORC types, and the first
	 * three column values of every row (expected layout: col1 int, col2
	 * string, col3 string — TODO confirm against the writer that produced
	 * the file).
	 *
	 * @param args unused
	 * @throws IOException declared for interface compatibility; I/O errors
	 *         are actually caught and reported inside the method
	 */
	public static void main(String[] args) throws IOException,
			InterruptedException, ClassNotFoundException {

		String path = "/tmp/orcfile1";
		RecordReader rr = null;

		try {
			conf = new Configuration();
			FileSystem fs = FileSystem.getLocal(conf);

			reader = OrcFile.createReader(new Path(path),
					OrcFile.readerOptions(conf).filesystem(fs));
			System.out.println("compressKind = " + reader.getCompression());
			// Fixed label typo (was "compressSzie").
			System.out.println("compressSize = " + reader.getCompressionSize());
			System.out.println("rowCount = " + reader.getNumberOfRows());
			System.out.println("ContentLength = " + reader.getContentLength());

			// Typed access to each field goes through the struct inspector.
			StructObjectInspector readerInspector =
					(StructObjectInspector) reader.getObjectInspector();
			List<? extends StructField> fields =
					readerInspector.getAllStructFieldRefs();
			IntObjectInspector in = (IntObjectInspector) readerInspector
					.getStructFieldRef("col1").getFieldObjectInspector();
			StringObjectInspector st = (StringObjectInspector) readerInspector
					.getStructFieldRef("col2").getFieldObjectInspector();

			// types.get(0) is the root struct; its children start at index 1,
			// hence the i-1 offset into the field-name list.
			List<OrcProto.Type> types = reader.getTypes();
			List<String> fldnames = types.get(0).getFieldNamesList();
			for (int i = 1; i < types.size(); i++) {
				System.out.println("colname = " + fldnames.get(i - 1));
				switch (types.get(i).getKind()) {
					case INT:
						System.out.println("fldtype = " + types.get(i));
						break;
					case STRING:
						System.out.println("fldtype = " + types.get(i));
						break;
					default:
						System.out.println("default type");
						break;
				}
			}

			rr = reader.rows();

			// Single guarded loop: the original read the first row with
			// rr.next(null) before checking hasNext(), which crashes on an
			// empty file and duplicated the loop body.
			Object row = null;
			while (rr.hasNext()) {
				row = rr.next(row); // reuse the row object across iterations
				System.out.println("row = " + row);
				int val1 = in.get(
						readerInspector.getStructFieldData(row, fields.get(0)));
				String val2 = st.getPrimitiveJavaObject(
						readerInspector.getStructFieldData(row, fields.get(1)));
				String val3 = st.getPrimitiveJavaObject(
						readerInspector.getStructFieldData(row, fields.get(2)));
				System.out.println("col1 = " + val1
						+ "; col2 = " + val2 + "; col3 = " + val3);
			}
		} catch (Exception e) {
			// Boundary of main in a demo: report and fall through rather
			// than propagate.
			e.printStackTrace();
		} finally {
			// Close the RecordReader — the original leaked it.
			if (rr != null) {
				try {
					rr.close();
				} catch (IOException ignored) {
					// nothing useful to do on close failure in this demo
				}
			}
		}
	}
}
