http://git-wip-us.apache.org/repos/asf/hive/blob/98303635/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
----------------------------------------------------------------------
diff --git 
a/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
 
b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
new file mode 100644
index 0000000..934a8a5
--- /dev/null
+++ 
b/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/ThriftHive.java
@@ -0,0 +1,7784 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.3)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.hadoop.hive.service;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
+public class ThriftHive {
+
  /**
   * Synchronous client-side interface for the legacy HiveServer (ThriftHive)
   * service. Extends the metastore interface so one connection can serve both
   * query-execution RPCs and metastore RPCs.
   *
   * NOTE: this file is autogenerated by the Thrift compiler (0.9.3); comments
   * here document the generated contract — do not hand-edit the logic.
   */
  public interface Iface extends org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface {

    /**
     * Submits a query string to the server for execution. No result rows are
     * returned directly; presumably they are retrieved afterwards via the
     * fetch* methods (see fetchOne/fetchN/fetchAll below).
     *
     * @param query the query text to execute
     * @throws HiveServerException on a server-side failure
     */
    public void execute(String query) throws HiveServerException, org.apache.thrift.TException;

    /** Fetches a single result row, serialized as a String. */
    public String fetchOne() throws HiveServerException, org.apache.thrift.TException;

    /**
     * Fetches up to {@code numRows} result rows.
     *
     * @param numRows maximum number of rows to return
     */
    public List<String> fetchN(int numRows) throws HiveServerException, org.apache.thrift.TException;

    /** Fetches all remaining result rows. */
    public List<String> fetchAll() throws HiveServerException, org.apache.thrift.TException;

    /** Returns the result schema as a metastore {@code Schema} object. */
    public org.apache.hadoop.hive.metastore.api.Schema getSchema() throws HiveServerException, org.apache.thrift.TException;

    /** Returns the Thrift-typed variant of the result schema. */
    public org.apache.hadoop.hive.metastore.api.Schema getThriftSchema() throws HiveServerException, org.apache.thrift.TException;

    /** Returns status information about the Hive cluster. */
    public HiveClusterStatus getClusterStatus() throws HiveServerException, org.apache.thrift.TException;

    /** Returns the query plan ({@code ql.plan.api.QueryPlan}) — presumably for the current/last query. */
    public org.apache.hadoop.hive.ql.plan.api.QueryPlan getQueryPlan() throws HiveServerException, org.apache.thrift.TException;

    /** Cleans up server-side state; unlike the other calls it throws no HiveServerException. */
    public void clean() throws org.apache.thrift.TException;

  }
+
  /**
   * Asynchronous counterpart of {@link Iface}: one method per RPC, each taking
   * an {@code AsyncMethodCallback} instead of returning a value. Results (or
   * exceptions) are delivered through the callback once the call completes.
   * Autogenerated by the Thrift compiler — do not hand-edit.
   */
  public interface AsyncIface extends org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore .AsyncIface {

    /** Async form of {@link Iface#execute(String)}. */
    public void execute(String query, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;

    /** Async form of {@link Iface#fetchOne()}. */
    public void fetchOne(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;

    /** Async form of {@link Iface#fetchN(int)}. */
    public void fetchN(int numRows, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;

    /** Async form of {@link Iface#fetchAll()}. */
    public void fetchAll(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;

    /** Async form of {@link Iface#getSchema()}. */
    public void getSchema(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;

    /** Async form of {@link Iface#getThriftSchema()}. */
    public void getThriftSchema(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;

    /** Async form of {@link Iface#getClusterStatus()}. */
    public void getClusterStatus(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;

    /** Async form of {@link Iface#getQueryPlan()}. */
    public void getQueryPlan(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;

    /** Async form of {@link Iface#clean()}. */
    public void clean(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;

  }
+
  /**
   * Blocking Thrift client implementing {@link Iface}. Each RPC follows the
   * standard generated pattern: {@code send_X} serializes an X_args struct via
   * {@code sendBase}, then {@code recv_X} reads an X_result struct via
   * {@code receiveBase}, rethrowing any declared exception carried in it.
   * Autogenerated by the Thrift compiler — do not hand-edit.
   */
  public static class Client extends org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Client implements Iface {
    /** Factory used by Thrift server/client plumbing to construct Clients over a protocol. */
    public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
      public Factory() {}
      public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
        return new Client(prot);
      }
      public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
        return new Client(iprot, oprot);
      }
    }

    /** Single-protocol constructor: the same protocol is used for input and output. */
    public Client(org.apache.thrift.protocol.TProtocol prot)
    {
      super(prot, prot);
    }

    /** Separate input/output protocol constructor. */
    public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
      super(iprot, oprot);
    }

    /** Synchronous execute: send the query, then block for the (void) reply. */
    public void execute(String query) throws HiveServerException, org.apache.thrift.TException
    {
      send_execute(query);
      recv_execute();
    }

    /** Serializes and sends the execute_args message. */
    public void send_execute(String query) throws org.apache.thrift.TException
    {
      execute_args args = new execute_args();
      args.setQuery(query);
      sendBase("execute", args);
    }

    /** Reads the execute reply; rethrows a server-side HiveServerException if one was returned. */
    public void recv_execute() throws HiveServerException, org.apache.thrift.TException
    {
      execute_result result = new execute_result();
      receiveBase(result, "execute");
      if (result.ex != null) {
        throw result.ex;
      }
      return;
    }

    /** Synchronous fetchOne: send request, block for one row. */
    public String fetchOne() throws HiveServerException, org.apache.thrift.TException
    {
      send_fetchOne();
      return recv_fetchOne();
    }

    public void send_fetchOne() throws org.apache.thrift.TException
    {
      fetchOne_args args = new fetchOne_args();
      sendBase("fetchOne", args);
    }

    /**
     * Reads the fetchOne reply. Success is checked before the exception field;
     * if neither is set the server returned nothing usable, which surfaces as
     * a MISSING_RESULT TApplicationException.
     */
    public String recv_fetchOne() throws HiveServerException, org.apache.thrift.TException
    {
      fetchOne_result result = new fetchOne_result();
      receiveBase(result, "fetchOne");
      if (result.isSetSuccess()) {
        return result.success;
      }
      if (result.ex != null) {
        throw result.ex;
      }
      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "fetchOne failed: unknown result");
    }

    /** Synchronous fetchN: send the row count, block for up to that many rows. */
    public List<String> fetchN(int numRows) throws HiveServerException, org.apache.thrift.TException
    {
      send_fetchN(numRows);
      return recv_fetchN();
    }

    public void send_fetchN(int numRows) throws org.apache.thrift.TException
    {
      fetchN_args args = new fetchN_args();
      args.setNumRows(numRows);
      sendBase("fetchN", args);
    }

    /** Reads the fetchN reply; same success/exception/MISSING_RESULT pattern as recv_fetchOne. */
    public List<String> recv_fetchN() throws HiveServerException, org.apache.thrift.TException
    {
      fetchN_result result = new fetchN_result();
      receiveBase(result, "fetchN");
      if (result.isSetSuccess()) {
        return result.success;
      }
      if (result.ex != null) {
        throw result.ex;
      }
      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "fetchN failed: unknown result");
    }

    /** Synchronous fetchAll: send request, block for all remaining rows. */
    public List<String> fetchAll() throws HiveServerException, org.apache.thrift.TException
    {
      send_fetchAll();
      return recv_fetchAll();
    }

    public void send_fetchAll() throws org.apache.thrift.TException
    {
      fetchAll_args args = new fetchAll_args();
      sendBase("fetchAll", args);
    }

    /** Reads the fetchAll reply; same success/exception/MISSING_RESULT pattern as recv_fetchOne. */
    public List<String> recv_fetchAll() throws HiveServerException, org.apache.thrift.TException
    {
      fetchAll_result result = new fetchAll_result();
      receiveBase(result, "fetchAll");
      if (result.isSetSuccess()) {
        return result.success;
      }
      if (result.ex != null) {
        throw result.ex;
      }
      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "fetchAll failed: unknown result");
    }

    /** Synchronous getSchema: send request, block for the result schema. */
    public org.apache.hadoop.hive.metastore.api.Schema getSchema() throws HiveServerException, org.apache.thrift.TException
    {
      send_getSchema();
      return recv_getSchema();
    }

    public void send_getSchema() throws org.apache.thrift.TException
    {
      getSchema_args args = new getSchema_args();
      sendBase("getSchema", args);
    }

    /** Reads the getSchema reply; same success/exception/MISSING_RESULT pattern as recv_fetchOne. */
    public org.apache.hadoop.hive.metastore.api.Schema recv_getSchema() throws HiveServerException, org.apache.thrift.TException
    {
      getSchema_result result = new getSchema_result();
      receiveBase(result, "getSchema");
      if (result.isSetSuccess()) {
        return result.success;
      }
      if (result.ex != null) {
        throw result.ex;
      }
      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getSchema failed: unknown result");
    }

    /** Synchronous getThriftSchema: send request, block for the Thrift-typed schema. */
    public org.apache.hadoop.hive.metastore.api.Schema getThriftSchema() throws HiveServerException, org.apache.thrift.TException
    {
      send_getThriftSchema();
      return recv_getThriftSchema();
    }

    public void send_getThriftSchema() throws org.apache.thrift.TException
    {
      getThriftSchema_args args = new getThriftSchema_args();
      sendBase("getThriftSchema", args);
    }

    /** Reads the getThriftSchema reply; same success/exception/MISSING_RESULT pattern as recv_fetchOne. */
    public org.apache.hadoop.hive.metastore.api.Schema recv_getThriftSchema() throws HiveServerException, org.apache.thrift.TException
    {
      getThriftSchema_result result = new getThriftSchema_result();
      receiveBase(result, "getThriftSchema");
      if (result.isSetSuccess()) {
        return result.success;
      }
      if (result.ex != null) {
        throw result.ex;
      }
      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getThriftSchema failed: unknown result");
    }

    /** Synchronous getClusterStatus: send request, block for the cluster status. */
    public HiveClusterStatus getClusterStatus() throws HiveServerException, org.apache.thrift.TException
    {
      send_getClusterStatus();
      return recv_getClusterStatus();
    }

    public void send_getClusterStatus() throws org.apache.thrift.TException
    {
      getClusterStatus_args args = new getClusterStatus_args();
      sendBase("getClusterStatus", args);
    }

    /** Reads the getClusterStatus reply; same success/exception/MISSING_RESULT pattern as recv_fetchOne. */
    public HiveClusterStatus recv_getClusterStatus() throws HiveServerException, org.apache.thrift.TException
    {
      getClusterStatus_result result = new getClusterStatus_result();
      receiveBase(result, "getClusterStatus");
      if (result.isSetSuccess()) {
        return result.success;
      }
      if (result.ex != null) {
        throw result.ex;
      }
      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getClusterStatus failed: unknown result");
    }

    /** Synchronous getQueryPlan: send request, block for the query plan. */
    public org.apache.hadoop.hive.ql.plan.api.QueryPlan getQueryPlan() throws HiveServerException, org.apache.thrift.TException
    {
      send_getQueryPlan();
      return recv_getQueryPlan();
    }

    public void send_getQueryPlan() throws org.apache.thrift.TException
    {
      getQueryPlan_args args = new getQueryPlan_args();
      sendBase("getQueryPlan", args);
    }

    /** Reads the getQueryPlan reply; same success/exception/MISSING_RESULT pattern as recv_fetchOne. */
    public org.apache.hadoop.hive.ql.plan.api.QueryPlan recv_getQueryPlan() throws HiveServerException, org.apache.thrift.TException
    {
      getQueryPlan_result result = new getQueryPlan_result();
      receiveBase(result, "getQueryPlan");
      if (result.isSetSuccess()) {
        return result.success;
      }
      if (result.ex != null) {
        throw result.ex;
      }
      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getQueryPlan failed: unknown result");
    }

    /** Synchronous clean; the reply carries no payload and no declared exception. */
    public void clean() throws org.apache.thrift.TException
    {
      send_clean();
      recv_clean();
    }

    public void send_clean() throws org.apache.thrift.TException
    {
      clean_args args = new clean_args();
      sendBase("clean", args);
    }

    /** Reads the clean reply; nothing to return or rethrow. */
    public void recv_clean() throws org.apache.thrift.TException
    {
      clean_result result = new clean_result();
      receiveBase(result, "clean");
      return;
    }

  }
  /**
   * Non-blocking Thrift client implementing {@link AsyncIface}. Each public
   * method registers an X_call with the shared TAsyncClientManager; the call
   * object serializes the request in {@code write_args} and, once the manager
   * has read the full response frame, {@code getResult} replays the buffered
   * bytes through a throwaway blocking {@link Client} to decode the reply.
   * Autogenerated by the Thrift compiler — do not hand-edit.
   */
  public static class AsyncClient extends org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.AsyncClient implements AsyncIface {
    /** Factory pairing a client manager and protocol factory to build AsyncClients per transport. */
    public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
      private org.apache.thrift.async.TAsyncClientManager clientManager;
      private org.apache.thrift.protocol.TProtocolFactory protocolFactory;
      public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
        this.clientManager = clientManager;
        this.protocolFactory = protocolFactory;
      }
      public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
        return new AsyncClient(protocolFactory, clientManager, transport);
      }
    }

    public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {
      super(protocolFactory, clientManager, transport);
    }

    /** Starts an async execute call; checkReady() guards against a call already in flight. */
    public void execute(String query, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady();
      execute_call method_call = new execute_call(query, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    /** In-flight state for one execute RPC. */
    public static class execute_call extends org.apache.thrift.async.TAsyncMethodCall {
      private String query;
      public execute_call(String query, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false);
        this.query = query;
      }

      /** Serializes the CALL message (seqid 0 — the async framework tracks calls per connection). */
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("execute", org.apache.thrift.protocol.TMessageType.CALL, 0));
        execute_args args = new execute_args();
        args.setQuery(query);
        args.write(prot);
        prot.writeMessageEnd();
      }

      /** Decodes the buffered response frame via a temporary blocking Client. */
      public void getResult() throws HiveServerException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        (new Client(prot)).recv_execute();
      }
    }

    /** Starts an async fetchOne call. */
    public void fetchOne(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady();
      fetchOne_call method_call = new fetchOne_call(resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    /** In-flight state for one fetchOne RPC (no arguments). */
    public static class fetchOne_call extends org.apache.thrift.async.TAsyncMethodCall {
      public fetchOne_call(org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false);
      }

      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("fetchOne", org.apache.thrift.protocol.TMessageType.CALL, 0));
        fetchOne_args args = new fetchOne_args();
        args.write(prot);
        prot.writeMessageEnd();
      }

      /** Decodes the buffered response frame and returns the fetched row. */
      public String getResult() throws HiveServerException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_fetchOne();
      }
    }

    /** Starts an async fetchN call. */
    public void fetchN(int numRows, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady();
      fetchN_call method_call = new fetchN_call(numRows, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    /** In-flight state for one fetchN RPC. */
    public static class fetchN_call extends org.apache.thrift.async.TAsyncMethodCall {
      private int numRows;
      public fetchN_call(int numRows, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false);
        this.numRows = numRows;
      }

      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("fetchN", org.apache.thrift.protocol.TMessageType.CALL, 0));
        fetchN_args args = new fetchN_args();
        args.setNumRows(numRows);
        args.write(prot);
        prot.writeMessageEnd();
      }

      /** Decodes the buffered response frame and returns the fetched rows. */
      public List<String> getResult() throws HiveServerException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_fetchN();
      }
    }

    /** Starts an async fetchAll call. */
    public void fetchAll(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady();
      fetchAll_call method_call = new fetchAll_call(resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    /** In-flight state for one fetchAll RPC (no arguments). */
    public static class fetchAll_call extends org.apache.thrift.async.TAsyncMethodCall {
      public fetchAll_call(org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false);
      }

      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("fetchAll", org.apache.thrift.protocol.TMessageType.CALL, 0));
        fetchAll_args args = new fetchAll_args();
        args.write(prot);
        prot.writeMessageEnd();
      }

      /** Decodes the buffered response frame and returns all fetched rows. */
      public List<String> getResult() throws HiveServerException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_fetchAll();
      }
    }

    /** Starts an async getSchema call. */
    public void getSchema(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady();
      getSchema_call method_call = new getSchema_call(resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    /** In-flight state for one getSchema RPC (no arguments). */
    public static class getSchema_call extends org.apache.thrift.async.TAsyncMethodCall {
      public getSchema_call(org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false);
      }

      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getSchema", org.apache.thrift.protocol.TMessageType.CALL, 0));
        getSchema_args args = new getSchema_args();
        args.write(prot);
        prot.writeMessageEnd();
      }

      /** Decodes the buffered response frame and returns the result schema. */
      public org.apache.hadoop.hive.metastore.api.Schema getResult() throws HiveServerException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_getSchema();
      }
    }

    /** Starts an async getThriftSchema call. */
    public void getThriftSchema(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady();
      getThriftSchema_call method_call = new getThriftSchema_call(resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    /** In-flight state for one getThriftSchema RPC (no arguments). */
    public static class getThriftSchema_call extends org.apache.thrift.async.TAsyncMethodCall {
      public getThriftSchema_call(org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false);
      }

      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getThriftSchema", org.apache.thrift.protocol.TMessageType.CALL, 0));
        getThriftSchema_args args = new getThriftSchema_args();
        args.write(prot);
        prot.writeMessageEnd();
      }

      /** Decodes the buffered response frame and returns the Thrift-typed schema. */
      public org.apache.hadoop.hive.metastore.api.Schema getResult() throws HiveServerException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_getThriftSchema();
      }
    }

    /** Starts an async getClusterStatus call. */
    public void getClusterStatus(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady();
      getClusterStatus_call method_call = new getClusterStatus_call(resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    /** In-flight state for one getClusterStatus RPC (no arguments). */
    public static class getClusterStatus_call extends org.apache.thrift.async.TAsyncMethodCall {
      public getClusterStatus_call(org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false);
      }

      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getClusterStatus", org.apache.thrift.protocol.TMessageType.CALL, 0));
        getClusterStatus_args args = new getClusterStatus_args();
        args.write(prot);
        prot.writeMessageEnd();
      }

      /** Decodes the buffered response frame and returns the cluster status. */
      public HiveClusterStatus getResult() throws HiveServerException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_getClusterStatus();
      }
    }

    /** Starts an async getQueryPlan call. */
    public void getQueryPlan(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady();
      getQueryPlan_call method_call = new getQueryPlan_call(resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    /** In-flight state for one getQueryPlan RPC (no arguments). */
    public static class getQueryPlan_call extends org.apache.thrift.async.TAsyncMethodCall {
      public getQueryPlan_call(org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false);
      }

      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getQueryPlan", org.apache.thrift.protocol.TMessageType.CALL, 0));
        getQueryPlan_args args = new getQueryPlan_args();
        args.write(prot);
        prot.writeMessageEnd();
      }

      /** Decodes the buffered response frame and returns the query plan. */
      public org.apache.hadoop.hive.ql.plan.api.QueryPlan getResult() throws HiveServerException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_getQueryPlan();
      }
    }

    /** Starts an async clean call. */
    public void clean(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady();
      clean_call method_call = new clean_call(resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }

    /** In-flight state for one clean RPC (no arguments, void result). */
    public static class clean_call extends org.apache.thrift.async.TAsyncMethodCall {
      public clean_call(org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false);
      }

      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("clean", org.apache.thrift.protocol.TMessageType.CALL, 0));
        clean_args args = new clean_args();
        args.write(prot);
        prot.writeMessageEnd();
      }

      /** Drains the buffered (void) response frame, surfacing any transport/protocol error. */
      public void getResult() throws org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        (new Client(prot)).recv_clean();
      }
    }

  }
+
+  public static class Processor<I extends Iface> extends 
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Processor<I> 
implements org.apache.thrift.TProcessor {
    private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
    /** Builds a processor over a fresh process map containing only this service's functions (plus inherited ones via super). */
    public Processor(I iface) {
      super(iface, getProcessMap(new HashMap<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>>()));
    }

    /** Extension hook: subclasses pass a pre-populated process map, which is augmented with this service's functions. */
    protected Processor(I iface, Map<String,  org.apache.thrift.ProcessFunction<I, ? extends  org.apache.thrift.TBase>> processMap) {
      super(iface, getProcessMap(processMap));
    }
+
    /**
     * Registers one ProcessFunction per RPC, keyed by wire method name, into
     * the supplied map and returns it. Keys must match the names used by the
     * client's sendBase/writeMessageBegin calls.
     */
    private static <I extends Iface> Map<String,  org.apache.thrift.ProcessFunction<I, ? extends  org.apache.thrift.TBase>> getProcessMap(Map<String,  org.apache.thrift.ProcessFunction<I, ? extends  org.apache.thrift.TBase>> processMap) {
      processMap.put("execute", new execute());
      processMap.put("fetchOne", new fetchOne());
      processMap.put("fetchN", new fetchN());
      processMap.put("fetchAll", new fetchAll());
      processMap.put("getSchema", new getSchema());
      processMap.put("getThriftSchema", new getThriftSchema());
      processMap.put("getClusterStatus", new getClusterStatus());
      processMap.put("getQueryPlan", new getQueryPlan());
      processMap.put("clean", new clean());
      return processMap;
    }
+
    /**
     * Server-side handler for the execute RPC: deserializes execute_args,
     * invokes the service implementation, and packs any declared
     * HiveServerException into the result struct instead of propagating it.
     */
    public static class execute<I extends Iface> extends org.apache.thrift.ProcessFunction<I, execute_args> {
      public execute() {
        super("execute");
      }

      public execute_args getEmptyArgsInstance() {
        return new execute_args();
      }

      /** Not oneway: a (void) reply message is still sent to the client. */
      protected boolean isOneway() {
        return false;
      }

      public execute_result getResult(I iface, execute_args args) throws org.apache.thrift.TException {
        execute_result result = new execute_result();
        try {
          iface.execute(args.query);
        } catch (HiveServerException ex) {
          // Declared exception travels back in the result struct, not as a transport error.
          result.ex = ex;
        }
        return result;
      }
    }
+
+    public static class fetchOne<I extends Iface> extends 
org.apache.thrift.ProcessFunction<I, fetchOne_args> {
+      public fetchOne() {
+        super("fetchOne");
+      }
+
+      public fetchOne_args getEmptyArgsInstance() {
+        return new fetchOne_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public fetchOne_result getResult(I iface, fetchOne_args args) throws 
org.apache.thrift.TException {
+        fetchOne_result result = new fetchOne_result();
+        try {
+          result.success = iface.fetchOne();
+        } catch (HiveServerException ex) {
+          result.ex = ex;
+        }
+        return result;
+      }
+    }
+
+    public static class fetchN<I extends Iface> extends 
org.apache.thrift.ProcessFunction<I, fetchN_args> {
+      public fetchN() {
+        super("fetchN");
+      }
+
+      public fetchN_args getEmptyArgsInstance() {
+        return new fetchN_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public fetchN_result getResult(I iface, fetchN_args args) throws 
org.apache.thrift.TException {
+        fetchN_result result = new fetchN_result();
+        try {
+          result.success = iface.fetchN(args.numRows);
+        } catch (HiveServerException ex) {
+          result.ex = ex;
+        }
+        return result;
+      }
+    }
+
+    public static class fetchAll<I extends Iface> extends 
org.apache.thrift.ProcessFunction<I, fetchAll_args> {
+      public fetchAll() {
+        super("fetchAll");
+      }
+
+      public fetchAll_args getEmptyArgsInstance() {
+        return new fetchAll_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public fetchAll_result getResult(I iface, fetchAll_args args) throws 
org.apache.thrift.TException {
+        fetchAll_result result = new fetchAll_result();
+        try {
+          result.success = iface.fetchAll();
+        } catch (HiveServerException ex) {
+          result.ex = ex;
+        }
+        return result;
+      }
+    }
+
+    public static class getSchema<I extends Iface> extends 
org.apache.thrift.ProcessFunction<I, getSchema_args> {
+      public getSchema() {
+        super("getSchema");
+      }
+
+      public getSchema_args getEmptyArgsInstance() {
+        return new getSchema_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public getSchema_result getResult(I iface, getSchema_args args) throws 
org.apache.thrift.TException {
+        getSchema_result result = new getSchema_result();
+        try {
+          result.success = iface.getSchema();
+        } catch (HiveServerException ex) {
+          result.ex = ex;
+        }
+        return result;
+      }
+    }
+
+    public static class getThriftSchema<I extends Iface> extends 
org.apache.thrift.ProcessFunction<I, getThriftSchema_args> {
+      public getThriftSchema() {
+        super("getThriftSchema");
+      }
+
+      public getThriftSchema_args getEmptyArgsInstance() {
+        return new getThriftSchema_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public getThriftSchema_result getResult(I iface, getThriftSchema_args 
args) throws org.apache.thrift.TException {
+        getThriftSchema_result result = new getThriftSchema_result();
+        try {
+          result.success = iface.getThriftSchema();
+        } catch (HiveServerException ex) {
+          result.ex = ex;
+        }
+        return result;
+      }
+    }
+
+    public static class getClusterStatus<I extends Iface> extends 
org.apache.thrift.ProcessFunction<I, getClusterStatus_args> {
+      public getClusterStatus() {
+        super("getClusterStatus");
+      }
+
+      public getClusterStatus_args getEmptyArgsInstance() {
+        return new getClusterStatus_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public getClusterStatus_result getResult(I iface, getClusterStatus_args 
args) throws org.apache.thrift.TException {
+        getClusterStatus_result result = new getClusterStatus_result();
+        try {
+          result.success = iface.getClusterStatus();
+        } catch (HiveServerException ex) {
+          result.ex = ex;
+        }
+        return result;
+      }
+    }
+
+    public static class getQueryPlan<I extends Iface> extends 
org.apache.thrift.ProcessFunction<I, getQueryPlan_args> {
+      public getQueryPlan() {
+        super("getQueryPlan");
+      }
+
+      public getQueryPlan_args getEmptyArgsInstance() {
+        return new getQueryPlan_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public getQueryPlan_result getResult(I iface, getQueryPlan_args args) 
throws org.apache.thrift.TException {
+        getQueryPlan_result result = new getQueryPlan_result();
+        try {
+          result.success = iface.getQueryPlan();
+        } catch (HiveServerException ex) {
+          result.ex = ex;
+        }
+        return result;
+      }
+    }
+
+    public static class clean<I extends Iface> extends 
org.apache.thrift.ProcessFunction<I, clean_args> {
+      public clean() {
+        super("clean");
+      }
+
+      public clean_args getEmptyArgsInstance() {
+        return new clean_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public clean_result getResult(I iface, clean_args args) throws 
org.apache.thrift.TException {
+        clean_result result = new clean_result();
+        iface.clean();
+        return result;
+      }
+    }
+
+  }
+
  /**
   * Asynchronous server-side processor for the ThriftHive service, for use
   * with the nonblocking servers (AbstractNonblockingServer).
   *
   * Mirrors {@link Processor} but each nested AsyncProcessFunction starts the
   * handler call and completes the RPC later through an AsyncMethodCallback:
   * onComplete serializes the success value as a REPLY; onError sends a
   * declared HiveServerException back as a normal REPLY carrying the result's
   * ex field, and wraps any other exception in a TApplicationException
   * (INTERNAL_ERROR) sent as an EXCEPTION message. If writing the response to
   * the frame buffer itself fails, the error is logged and the frame buffer
   * is closed.
   *
   * Thrift-compiler generated code; do not hand-edit.
   */
  public static class AsyncProcessor<I extends AsyncIface> extends org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.AsyncProcessor<I> {
    private static final Logger LOGGER = LoggerFactory.getLogger(AsyncProcessor.class.getName());
    public AsyncProcessor(I iface) {
      super(iface, getProcessMap(new HashMap<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>>()));
    }

    protected AsyncProcessor(I iface, Map<String,  org.apache.thrift.AsyncProcessFunction<I, ? extends  org.apache.thrift.TBase, ?>> processMap) {
      super(iface, getProcessMap(processMap));
    }

    // Adds this service's async method handlers into processMap (on top of
    // any entries the caller/superclass already supplied) and returns the map.
    private static <I extends AsyncIface> Map<String,  org.apache.thrift.AsyncProcessFunction<I, ? extends  org.apache.thrift.TBase,?>> getProcessMap(Map<String,  org.apache.thrift.AsyncProcessFunction<I, ? extends  org.apache.thrift.TBase, ?>> processMap) {
      processMap.put("execute", new execute());
      processMap.put("fetchOne", new fetchOne());
      processMap.put("fetchN", new fetchN());
      processMap.put("fetchAll", new fetchAll());
      processMap.put("getSchema", new getSchema());
      processMap.put("getThriftSchema", new getThriftSchema());
      processMap.put("getClusterStatus", new getClusterStatus());
      processMap.put("getQueryPlan", new getQueryPlan());
      processMap.put("clean", new clean());
      return processMap;
    }

    // Async "execute": void RPC (callback type Void); onComplete sends an
    // empty result struct, onError distinguishes the declared
    // HiveServerException (REPLY with result.ex set) from everything else
    // (EXCEPTION with a TApplicationException).
    public static class execute<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, execute_args, Void> {
      public execute() {
        super("execute");
      }

      public execute_args getEmptyArgsInstance() {
        return new execute_args();
      }

      public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
        final org.apache.thrift.AsyncProcessFunction fcall = this;
        return new AsyncMethodCallback<Void>() { 
          public void onComplete(Void o) {
            execute_result result = new execute_result();
            try {
              fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
              return;
            } catch (Exception e) {
              // Response could not be written; close the connection's buffer.
              LOGGER.error("Exception writing to internal frame buffer", e);
            }
            fb.close();
          }
          public void onError(Exception e) {
            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
            org.apache.thrift.TBase msg;
            execute_result result = new execute_result();
            if (e instanceof HiveServerException) {
                        result.ex = (HiveServerException) e;
                        result.setExIsSet(true);
                        msg = result;
            }
             else 
            {
              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
              msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
            }
            try {
              fcall.sendResponse(fb,msg,msgType,seqid);
              return;
            } catch (Exception ex) {
              LOGGER.error("Exception writing to internal frame buffer", ex);
            }
            fb.close();
          }
        };
      }

      protected boolean isOneway() {
        return false;
      }

      public void start(I iface, execute_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
        iface.execute(args.query,resultHandler);
      }
    }

    // Async "fetchOne": String result; same REPLY/EXCEPTION handling as
    // execute's callback.
    public static class fetchOne<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, fetchOne_args, String> {
      public fetchOne() {
        super("fetchOne");
      }

      public fetchOne_args getEmptyArgsInstance() {
        return new fetchOne_args();
      }

      public AsyncMethodCallback<String> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
        final org.apache.thrift.AsyncProcessFunction fcall = this;
        return new AsyncMethodCallback<String>() { 
          public void onComplete(String o) {
            fetchOne_result result = new fetchOne_result();
            result.success = o;
            try {
              fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
              return;
            } catch (Exception e) {
              LOGGER.error("Exception writing to internal frame buffer", e);
            }
            fb.close();
          }
          public void onError(Exception e) {
            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
            org.apache.thrift.TBase msg;
            fetchOne_result result = new fetchOne_result();
            if (e instanceof HiveServerException) {
                        result.ex = (HiveServerException) e;
                        result.setExIsSet(true);
                        msg = result;
            }
             else 
            {
              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
              msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
            }
            try {
              fcall.sendResponse(fb,msg,msgType,seqid);
              return;
            } catch (Exception ex) {
              LOGGER.error("Exception writing to internal frame buffer", ex);
            }
            fb.close();
          }
        };
      }

      protected boolean isOneway() {
        return false;
      }

      public void start(I iface, fetchOne_args args, org.apache.thrift.async.AsyncMethodCallback<String> resultHandler) throws TException {
        iface.fetchOne(resultHandler);
      }
    }

    // Async "fetchN": List<String> result; forwards args.numRows to the
    // handler; same REPLY/EXCEPTION handling as execute's callback.
    public static class fetchN<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, fetchN_args, List<String>> {
      public fetchN() {
        super("fetchN");
      }

      public fetchN_args getEmptyArgsInstance() {
        return new fetchN_args();
      }

      public AsyncMethodCallback<List<String>> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
        final org.apache.thrift.AsyncProcessFunction fcall = this;
        return new AsyncMethodCallback<List<String>>() { 
          public void onComplete(List<String> o) {
            fetchN_result result = new fetchN_result();
            result.success = o;
            try {
              fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
              return;
            } catch (Exception e) {
              LOGGER.error("Exception writing to internal frame buffer", e);
            }
            fb.close();
          }
          public void onError(Exception e) {
            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
            org.apache.thrift.TBase msg;
            fetchN_result result = new fetchN_result();
            if (e instanceof HiveServerException) {
                        result.ex = (HiveServerException) e;
                        result.setExIsSet(true);
                        msg = result;
            }
             else 
            {
              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
              msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
            }
            try {
              fcall.sendResponse(fb,msg,msgType,seqid);
              return;
            } catch (Exception ex) {
              LOGGER.error("Exception writing to internal frame buffer", ex);
            }
            fb.close();
          }
        };
      }

      protected boolean isOneway() {
        return false;
      }

      public void start(I iface, fetchN_args args, org.apache.thrift.async.AsyncMethodCallback<List<String>> resultHandler) throws TException {
        iface.fetchN(args.numRows,resultHandler);
      }
    }

    // Async "fetchAll": List<String> result; same REPLY/EXCEPTION handling
    // as execute's callback.
    public static class fetchAll<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, fetchAll_args, List<String>> {
      public fetchAll() {
        super("fetchAll");
      }

      public fetchAll_args getEmptyArgsInstance() {
        return new fetchAll_args();
      }

      public AsyncMethodCallback<List<String>> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
        final org.apache.thrift.AsyncProcessFunction fcall = this;
        return new AsyncMethodCallback<List<String>>() { 
          public void onComplete(List<String> o) {
            fetchAll_result result = new fetchAll_result();
            result.success = o;
            try {
              fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
              return;
            } catch (Exception e) {
              LOGGER.error("Exception writing to internal frame buffer", e);
            }
            fb.close();
          }
          public void onError(Exception e) {
            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
            org.apache.thrift.TBase msg;
            fetchAll_result result = new fetchAll_result();
            if (e instanceof HiveServerException) {
                        result.ex = (HiveServerException) e;
                        result.setExIsSet(true);
                        msg = result;
            }
             else 
            {
              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
              msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
            }
            try {
              fcall.sendResponse(fb,msg,msgType,seqid);
              return;
            } catch (Exception ex) {
              LOGGER.error("Exception writing to internal frame buffer", ex);
            }
            fb.close();
          }
        };
      }

      protected boolean isOneway() {
        return false;
      }

      public void start(I iface, fetchAll_args args, org.apache.thrift.async.AsyncMethodCallback<List<String>> resultHandler) throws TException {
        iface.fetchAll(resultHandler);
      }
    }

    // Async "getSchema": metastore Schema result; same REPLY/EXCEPTION
    // handling as execute's callback.
    public static class getSchema<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getSchema_args, org.apache.hadoop.hive.metastore.api.Schema> {
      public getSchema() {
        super("getSchema");
      }

      public getSchema_args getEmptyArgsInstance() {
        return new getSchema_args();
      }

      public AsyncMethodCallback<org.apache.hadoop.hive.metastore.api.Schema> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
        final org.apache.thrift.AsyncProcessFunction fcall = this;
        return new AsyncMethodCallback<org.apache.hadoop.hive.metastore.api.Schema>() { 
          public void onComplete(org.apache.hadoop.hive.metastore.api.Schema o) {
            getSchema_result result = new getSchema_result();
            result.success = o;
            try {
              fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
              return;
            } catch (Exception e) {
              LOGGER.error("Exception writing to internal frame buffer", e);
            }
            fb.close();
          }
          public void onError(Exception e) {
            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
            org.apache.thrift.TBase msg;
            getSchema_result result = new getSchema_result();
            if (e instanceof HiveServerException) {
                        result.ex = (HiveServerException) e;
                        result.setExIsSet(true);
                        msg = result;
            }
             else 
            {
              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
              msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
            }
            try {
              fcall.sendResponse(fb,msg,msgType,seqid);
              return;
            } catch (Exception ex) {
              LOGGER.error("Exception writing to internal frame buffer", ex);
            }
            fb.close();
          }
        };
      }

      protected boolean isOneway() {
        return false;
      }

      public void start(I iface, getSchema_args args, org.apache.thrift.async.AsyncMethodCallback<org.apache.hadoop.hive.metastore.api.Schema> resultHandler) throws TException {
        iface.getSchema(resultHandler);
      }
    }

    // Async "getThriftSchema": metastore Schema result; same REPLY/EXCEPTION
    // handling as execute's callback.
    public static class getThriftSchema<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getThriftSchema_args, org.apache.hadoop.hive.metastore.api.Schema> {
      public getThriftSchema() {
        super("getThriftSchema");
      }

      public getThriftSchema_args getEmptyArgsInstance() {
        return new getThriftSchema_args();
      }

      public AsyncMethodCallback<org.apache.hadoop.hive.metastore.api.Schema> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
        final org.apache.thrift.AsyncProcessFunction fcall = this;
        return new AsyncMethodCallback<org.apache.hadoop.hive.metastore.api.Schema>() { 
          public void onComplete(org.apache.hadoop.hive.metastore.api.Schema o) {
            getThriftSchema_result result = new getThriftSchema_result();
            result.success = o;
            try {
              fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
              return;
            } catch (Exception e) {
              LOGGER.error("Exception writing to internal frame buffer", e);
            }
            fb.close();
          }
          public void onError(Exception e) {
            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
            org.apache.thrift.TBase msg;
            getThriftSchema_result result = new getThriftSchema_result();
            if (e instanceof HiveServerException) {
                        result.ex = (HiveServerException) e;
                        result.setExIsSet(true);
                        msg = result;
            }
             else 
            {
              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
              msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
            }
            try {
              fcall.sendResponse(fb,msg,msgType,seqid);
              return;
            } catch (Exception ex) {
              LOGGER.error("Exception writing to internal frame buffer", ex);
            }
            fb.close();
          }
        };
      }

      protected boolean isOneway() {
        return false;
      }

      public void start(I iface, getThriftSchema_args args, org.apache.thrift.async.AsyncMethodCallback<org.apache.hadoop.hive.metastore.api.Schema> resultHandler) throws TException {
        iface.getThriftSchema(resultHandler);
      }
    }

    // Async "getClusterStatus": HiveClusterStatus result; same
    // REPLY/EXCEPTION handling as execute's callback.
    public static class getClusterStatus<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getClusterStatus_args, HiveClusterStatus> {
      public getClusterStatus() {
        super("getClusterStatus");
      }

      public getClusterStatus_args getEmptyArgsInstance() {
        return new getClusterStatus_args();
      }

      public AsyncMethodCallback<HiveClusterStatus> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
        final org.apache.thrift.AsyncProcessFunction fcall = this;
        return new AsyncMethodCallback<HiveClusterStatus>() { 
          public void onComplete(HiveClusterStatus o) {
            getClusterStatus_result result = new getClusterStatus_result();
            result.success = o;
            try {
              fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
              return;
            } catch (Exception e) {
              LOGGER.error("Exception writing to internal frame buffer", e);
            }
            fb.close();
          }
          public void onError(Exception e) {
            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
            org.apache.thrift.TBase msg;
            getClusterStatus_result result = new getClusterStatus_result();
            if (e instanceof HiveServerException) {
                        result.ex = (HiveServerException) e;
                        result.setExIsSet(true);
                        msg = result;
            }
             else 
            {
              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
              msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
            }
            try {
              fcall.sendResponse(fb,msg,msgType,seqid);
              return;
            } catch (Exception ex) {
              LOGGER.error("Exception writing to internal frame buffer", ex);
            }
            fb.close();
          }
        };
      }

      protected boolean isOneway() {
        return false;
      }

      public void start(I iface, getClusterStatus_args args, org.apache.thrift.async.AsyncMethodCallback<HiveClusterStatus> resultHandler) throws TException {
        iface.getClusterStatus(resultHandler);
      }
    }

    // Async "getQueryPlan": ql QueryPlan result; same REPLY/EXCEPTION
    // handling as execute's callback.
    public static class getQueryPlan<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getQueryPlan_args, org.apache.hadoop.hive.ql.plan.api.QueryPlan> {
      public getQueryPlan() {
        super("getQueryPlan");
      }

      public getQueryPlan_args getEmptyArgsInstance() {
        return new getQueryPlan_args();
      }

      public AsyncMethodCallback<org.apache.hadoop.hive.ql.plan.api.QueryPlan> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
        final org.apache.thrift.AsyncProcessFunction fcall = this;
        return new AsyncMethodCallback<org.apache.hadoop.hive.ql.plan.api.QueryPlan>() { 
          public void onComplete(org.apache.hadoop.hive.ql.plan.api.QueryPlan o) {
            getQueryPlan_result result = new getQueryPlan_result();
            result.success = o;
            try {
              fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
              return;
            } catch (Exception e) {
              LOGGER.error("Exception writing to internal frame buffer", e);
            }
            fb.close();
          }
          public void onError(Exception e) {
            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
            org.apache.thrift.TBase msg;
            getQueryPlan_result result = new getQueryPlan_result();
            if (e instanceof HiveServerException) {
                        result.ex = (HiveServerException) e;
                        result.setExIsSet(true);
                        msg = result;
            }
             else 
            {
              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
              msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
            }
            try {
              fcall.sendResponse(fb,msg,msgType,seqid);
              return;
            } catch (Exception ex) {
              LOGGER.error("Exception writing to internal frame buffer", ex);
            }
            fb.close();
          }
        };
      }

      protected boolean isOneway() {
        return false;
      }

      public void start(I iface, getQueryPlan_args args, org.apache.thrift.async.AsyncMethodCallback<org.apache.hadoop.hive.ql.plan.api.QueryPlan> resultHandler) throws TException {
        iface.getQueryPlan(resultHandler);
      }
    }

    // Async "clean": void RPC with no declared service exception, so onError
    // has no HiveServerException branch -- every failure is reported as a
    // TApplicationException EXCEPTION message (the bare block below is the
    // generator's degenerate form of the usual if/else).
    public static class clean<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, clean_args, Void> {
      public clean() {
        super("clean");
      }

      public clean_args getEmptyArgsInstance() {
        return new clean_args();
      }

      public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
        final org.apache.thrift.AsyncProcessFunction fcall = this;
        return new AsyncMethodCallback<Void>() { 
          public void onComplete(Void o) {
            clean_result result = new clean_result();
            try {
              fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
              return;
            } catch (Exception e) {
              LOGGER.error("Exception writing to internal frame buffer", e);
            }
            fb.close();
          }
          public void onError(Exception e) {
            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
            org.apache.thrift.TBase msg;
            clean_result result = new clean_result();
            {
              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
              msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
            }
            try {
              fcall.sendResponse(fb,msg,msgType,seqid);
              return;
            } catch (Exception ex) {
              LOGGER.error("Exception writing to internal frame buffer", ex);
            }
            fb.close();
          }
        };
      }

      protected boolean isOneway() {
        return false;
      }

      public void start(I iface, clean_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
        iface.clean(resultHandler);
      }
    }

  }
+
+  public static class execute_args implements 
org.apache.thrift.TBase<execute_args, execute_args._Fields>, 
java.io.Serializable, Cloneable, Comparable<execute_args>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new 
org.apache.thrift.protocol.TStruct("execute_args");
+
+    private static final org.apache.thrift.protocol.TField QUERY_FIELD_DESC = 
new org.apache.thrift.protocol.TField("query", 
org.apache.thrift.protocol.TType.STRING, (short)1);
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes 
= new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new 
execute_argsStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new execute_argsTupleSchemeFactory());
+    }
+
+    private String query; // required
+
+    /** The set of fields this struct contains, along with convenience methods 
for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+      QUERY((short)1, "query");
+
+      private static final Map<String, _Fields> byName = new HashMap<String, 
_Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if its not 
found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          case 1: // QUERY
+            return QUERY;
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + 
fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if its not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+
+    // isset id assignments
+    public static final Map<_Fields, 
org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new 
EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      tmpMap.put(_Fields.QUERY, new 
org.apache.thrift.meta_data.FieldMetaData("query", 
org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(execute_args.class,
 metaDataMap);
+    }
+
    /** Default constructor; query starts unset (null). */
    public execute_args() {
    }

    /** Convenience constructor that assigns the single query field. */
    public execute_args(
      String query)
    {
      this();
      this.query = query;
    }

    /**
     * Performs a deep copy on <i>other</i>.
     */
    public execute_args(execute_args other) {
      // String is immutable, so sharing the reference is a true deep copy.
      if (other.isSetQuery()) {
        this.query = other.query;
      }
    }

    public execute_args deepCopy() {
      return new execute_args(this);
    }

    @Override
    public void clear() {
      // Restore the post-default-construction state.
      this.query = null;
    }
+
    /** Returns the query string, or null when unset. */
    public String getQuery() {
      return this.query;
    }

    public void setQuery(String query) {
      this.query = query;
    }

    /** Marks the field unset; "unset" and "null" are the same state here. */
    public void unsetQuery() {
      this.query = null;
    }

    /** Returns true if field query is set (has been assigned a value) and false otherwise */
    public boolean isSetQuery() {
      return this.query != null;
    }

    public void setQueryIsSet(boolean value) {
      // Presence is modelled by nullness, so only the false case acts.
      if (!value) {
        this.query = null;
      }
    }
+
    /** Generic setter used by the Thrift framework; null clears the field. */
    public void setFieldValue(_Fields field, Object value) {
      switch (field) {
      case QUERY:
        if (value == null) {
          unsetQuery();
        } else {
          setQuery((String)value);
        }
        break;

      }
    }

    /** Generic getter used by the Thrift framework. */
    public Object getFieldValue(_Fields field) {
      switch (field) {
      case QUERY:
        return getQuery();

      }
      // Unreachable for a valid _Fields constant; guards enum evolution.
      throw new IllegalStateException();
    }

    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
    public boolean isSet(_Fields field) {
      if (field == null) {
        throw new IllegalArgumentException();
      }

      switch (field) {
      case QUERY:
        return isSetQuery();
      }
      throw new IllegalStateException();
    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof execute_args)
+        return this.equals((execute_args)that);
+      return false;
+    }
+
+    public boolean equals(execute_args that) {
+      if (that == null)
+        return false;
+
+      boolean this_present_query = true && this.isSetQuery();
+      boolean that_present_query = true && that.isSetQuery();
+      if (this_present_query || that_present_query) {
+        if (!(this_present_query && that_present_query))
+          return false;
+        if (!this.query.equals(that.query))
+          return false;
+      }
+
+      return true;
+    }
+
    /** Hash consistent with equals: folds the presence flag and the value. */
    @Override
    public int hashCode() {
      List<Object> list = new ArrayList<Object>();

      boolean present_query = true && (isSetQuery());
      list.add(present_query);
      if (present_query)
        list.add(query);

      return list.hashCode();
    }
+
    /**
     * Orders by the isSetQuery flag first, then by the query value; instances
     * of different runtime classes order by class name.
     */
    @Override
    public int compareTo(execute_args other) {
      if (!getClass().equals(other.getClass())) {
        return getClass().getName().compareTo(other.getClass().getName());
      }

      int lastComparison = 0;

      lastComparison = Boolean.valueOf(isSetQuery()).compareTo(other.isSetQuery());
      if (lastComparison != 0) {
        return lastComparison;
      }
      if (isSetQuery()) {
        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.query, other.query);
        if (lastComparison != 0) {
          return lastComparison;
        }
      }
      return 0;
    }

    /** Returns null for unknown ids, per the TBase contract. */
    public _Fields fieldForId(int fieldId) {
      return _Fields.findByThriftId(fieldId);
    }
+
    /** Deserializes this struct via the scheme matching the protocol (standard or tuple). */
    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
    }

    /** Serializes this struct via the scheme matching the protocol (standard or tuple). */
    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
    }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("execute_args(");
+      boolean first = true;
+
+      sb.append("query:");
+      if (this.query == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.query);
+      }
+      first = false;
+      sb.append(")");
+      return sb.toString();
+    }
+
    /** No required fields and no contained structs, so nothing to check. */
    public void validate() throws org.apache.thrift.TException {
      // check for required fields
      // check for sub-struct validity
    }

    /** Java serialization hook: encodes the struct with the Thrift compact protocol. */
    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
      try {
        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
      } catch (org.apache.thrift.TException te) {
        // Wrap so the signature stays java.io-only.
        throw new java.io.IOException(te);
      }
    }

    /** Java deserialization hook: decodes the Thrift compact encoding written above. */
    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
      try {
        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }
+
    /** Framework hook producing the field-tagged (standard) scheme for execute_args. */
    private static class execute_argsStandardSchemeFactory implements SchemeFactory {
      public execute_argsStandardScheme getScheme() {
        return new execute_argsStandardScheme();
      }
    }
+
    /** Field-tagged (standard) protocol encoding for execute_args. */
    private static class execute_argsStandardScheme extends StandardScheme<execute_args> {

      /** Reads fields in any order until STOP, skipping unknown ids for forward compatibility. */
      public void read(org.apache.thrift.protocol.TProtocol iprot, execute_args struct) throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TField schemeField;
        iprot.readStructBegin();
        while (true)
        {
          schemeField = iprot.readFieldBegin();
          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
            break;
          }
          switch (schemeField.id) {
            case 1: // QUERY
              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
                struct.query = iprot.readString();
                struct.setQueryIsSet(true);
              } else { 
                // Wire type mismatch: skip the value rather than fail.
                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
              }
              break;
            default:
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          iprot.readFieldEnd();
        }
        iprot.readStructEnd();
        struct.validate();
      }

      /** Writes only set fields; a null query is simply omitted from the wire. */
      public void write(org.apache.thrift.protocol.TProtocol oprot, execute_args struct) throws org.apache.thrift.TException {
        struct.validate();

        oprot.writeStructBegin(STRUCT_DESC);
        if (struct.query != null) {
          oprot.writeFieldBegin(QUERY_FIELD_DESC);
          oprot.writeString(struct.query);
          oprot.writeFieldEnd();
        }
        oprot.writeFieldStop();
        oprot.writeStructEnd();
      }

    }
+
    /** Framework hook producing the compact tuple scheme for execute_args. */
    private static class execute_argsTupleSchemeFactory implements SchemeFactory {
      public execute_argsTupleScheme getScheme() {
        return new execute_argsTupleScheme();
      }
    }
+
    /** Compact tuple encoding: a presence bitset followed by the set field values. */
    private static class execute_argsTupleScheme extends TupleScheme<execute_args> {

      @Override
      public void write(org.apache.thrift.protocol.TProtocol prot, execute_args struct) throws org.apache.thrift.TException {
        TTupleProtocol oprot = (TTupleProtocol) prot;
        BitSet optionals = new BitSet();
        if (struct.isSetQuery()) {
          optionals.set(0);
        }
        // One field -> one-bit presence mask.
        oprot.writeBitSet(optionals, 1);
        if (struct.isSetQuery()) {
          oprot.writeString(struct.query);
        }
      }

      @Override
      public void read(org.apache.thrift.protocol.TProtocol prot, execute_args struct) throws org.apache.thrift.TException {
        TTupleProtocol iprot = (TTupleProtocol) prot;
        BitSet incoming = iprot.readBitSet(1);
        if (incoming.get(0)) {
          struct.query = iprot.readString();
          struct.setQueryIsSet(true);
        }
      }
    }
+
+  }
+
+  public static class execute_result implements 
org.apache.thrift.TBase<execute_result, execute_result._Fields>, 
java.io.Serializable, Cloneable, Comparable<execute_result>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new 
org.apache.thrift.protocol.TStruct("execute_result");
+
+    private static final org.apache.thrift.protocol.TField EX_FIELD_DESC = new 
org.apache.thrift.protocol.TField("ex", 
org.apache.thrift.protocol.TType.STRUCT, (short)1);
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes 
= new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new 
execute_resultStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new execute_resultTupleSchemeFactory());
+    }
+
+    private HiveServerException ex; // required
+
+    /** The set of fields this struct contains, along with convenience methods 
for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+      EX((short)1, "ex");
+
+      private static final Map<String, _Fields> byName = new HashMap<String, 
_Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if its not 
found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          case 1: // EX
+            return EX;
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + 
fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if its not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+
+    // isset id assignments
+    public static final Map<_Fields, 
org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new 
EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      tmpMap.put(_Fields.EX, new 
org.apache.thrift.meta_data.FieldMetaData("ex", 
org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(execute_result.class,
 metaDataMap);
+    }
+
+    public execute_result() {
+    }
+
+    public execute_result(
+      HiveServerException ex)
+    {
+      this();
+      this.ex = ex;
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public execute_result(execute_result other) {
+      if (other.isSetEx()) {
+        this.ex = new HiveServerException(other.ex);
+      }
+    }
+
+    public execute_result deepCopy() {
+      return new execute_result(this);
+    }
+
+    @Override
+    public void clear() {
+      this.ex = null;
+    }
+
+    public HiveServerException getEx() {
+      return this.ex;
+    }
+
+    public void setEx(HiveServerException ex) {
+      this.ex = ex;
+    }
+
+    public void unsetEx() {
+      this.ex = null;
+    }
+
+    /** Returns true if field ex is set (has been assigned a value) and false 
otherwise */
+    public boolean isSetEx() {
+      return this.ex != null;
+    }
+
+    public void setExIsSet(boolean value) {
+      if (!value) {
+        this.ex = null;
+      }
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      case EX:
+        if (value == null) {
+          unsetEx();
+        } else {
+          setEx((HiveServerException)value);
+        }
+        break;
+
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      case EX:
+        return getEx();
+
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been 
assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      case EX:
+        return isSetEx();
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof execute_result)
+        return this.equals((execute_result)that);
+      return false;
+    }
+
+    public boolean equals(execute_result that) {
+      if (that == null)
+        return false;
+
+      boolean this_present_ex = true && this.isSetEx();
+      boolean that_present_ex = true && that.isSetEx();
+      if (this_present_ex || that_present_ex) {
+        if (!(this_present_ex && that_present_ex))
+          return false;
+        if (!this.ex.equals(that.ex))
+          return false;
+      }
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      List<Object> list = new ArrayList<Object>();
+
+      boolean present_ex = true && (isSetEx());
+      list.add(present_ex);
+      if (present_ex)
+        list.add(ex);
+
+      return list.hashCode();
+    }
+
+    @Override
+    public int compareTo(execute_result other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+
+      lastComparison = Boolean.valueOf(isSetEx()).compareTo(other.isSetEx());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (isSetEx()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.ex, 
other.ex);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws 
org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws 
org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+      }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("execute_result(");
+      boolean first = true;
+
+      sb.append("ex:");
+      if (this.ex == null) {
+        sb.append("null");
+      } else {
+        sb.append(this.ex);
+      }
+      first = false;
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws 
java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new 
org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws 
java.io.IOException, ClassNotFoundException {
+      try {
+        read(new org.apache.thrift.protocol.TCompactProtocol(new 
org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class execute_resultStandardSchemeFactory implements 
SchemeFactory {
+      public execute_resultStandardScheme getScheme() {
+        return new execute_resultStandardScheme();
+      }
+    }
+
+    private static class execute_resultStandardScheme extends 
StandardScheme<execute_result> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, 
execute_result struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            case 1: // EX
+              if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) 
{
+                struct.ex = new HiveServerException();
+                struct.ex.read(iprot);
+                struct.setExIsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, 
schemeField.type);
+              }
+              break;
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, 
schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, 
execute_result struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        if (struct.ex != null) {
+          oprot.writeFieldBegin(EX_FIELD_DESC);
+          struct.ex.write(oprot);
+          oprot.writeFieldEnd();
+        }
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class execute_resultTupleSchemeFactory implements 
SchemeFactory {
+      public execute_resultTupleScheme getScheme() {
+        return new execute_resultTupleScheme();
+      }
+    }
+
+    private static class execute_resultTupleScheme extends 
TupleScheme<execute_result> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, 
execute_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
+        BitSet optionals = new BitSet();
+        if (struct.isSetEx()) {
+          optionals.set(0);
+        }
+        oprot.writeBitSet(optionals, 1);
+        if (struct.isSetEx()) {
+          struct.ex.write(oprot);
+        }
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, 
execute_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+        BitSet incoming = iprot.readBitSet(1);
+        if (incoming.get(0)) {
+          struct.ex = new HiveServerException();
+          struct.ex.read(iprot);
+          struct.setExIsSet(true);
+        }
+      }
+    }
+
+  }
+
+  public static class fetchOne_args implements 
org.apache.thrift.TBase<fetchOne_args, fetchOne_args._Fields>, 
java.io.Serializable, Cloneable, Comparable<fetchOne_args>   {
    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("fetchOne_args");


    // Serialization strategies: standard (field-tagged) and tuple (compact).
    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
    static {
      schemes.put(StandardScheme.class, new fetchOne_argsStandardSchemeFactory());
      schemes.put(TupleScheme.class, new fetchOne_argsTupleSchemeFactory());
    }
+
+
    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
;

      // fetchOne takes no arguments, so this enum has no constants; the
      // lookup helpers below always return null (or throw).
      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

      static {
        for (_Fields field : EnumSet.allOf(_Fields.class)) {
          byName.put(field.getFieldName(), field);
        }
      }

      /**
       * Find the _Fields constant that matches fieldId, or null if its not found.
       */
      public static _Fields findByThriftId(int fieldId) {
        switch(fieldId) {
          default:
            return null;
        }
      }

      /**
       * Find the _Fields constant that matches fieldId, throwing an exception
       * if it is not found.
       */
      public static _Fields findByThriftIdOrThrow(int fieldId) {
        _Fields fields = findByThriftId(fieldId);
        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
        return fields;
      }

      /**
       * Find the _Fields constant that matches name, or null if its not found.
       */
      public static _Fields findByName(String name) {
        return byName.get(name);
      }

      private final short _thriftId;
      private final String _fieldName;

      _Fields(short thriftId, String fieldName) {
        _thriftId = thriftId;
        _fieldName = fieldName;
      }

      public short getThriftFieldId() {
        return _thriftId;
      }

      public String getFieldName() {
        return _fieldName;
      }
    }
    /** Empty (no fields) but still registered so framework introspection works uniformly. */
    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
    static {
      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
      metaDataMap = Collections.unmodifiableMap(tmpMap);
      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(fetchOne_args.class, metaDataMap);
    }
+
    public fetchOne_args() {
    }

    /**
     * Performs a deep copy on <i>other</i>.
     */
    public fetchOne_args(fetchOne_args other) {
      // No fields to copy.
    }

    public fetchOne_args deepCopy() {
      return new fetchOne_args(this);
    }

    @Override
    public void clear() {
      // No fields to reset.
    }
+
    /** No fields exist, so there is nothing to set. */
    public void setFieldValue(_Fields field, Object value) {
      switch (field) {
      }
    }

    /** No fields exist, so any lookup is a programming error. */
    public Object getFieldValue(_Fields field) {
      switch (field) {
      }
      throw new IllegalStateException();
    }

    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
    public boolean isSet(_Fields field) {
      if (field == null) {
        throw new IllegalArgumentException();
      }

      switch (field) {
      }
      throw new IllegalStateException();
    }
+
    @Override
    public boolean equals(Object that) {
      if (that == null)
        return false;
      if (that instanceof fetchOne_args)
        return this.equals((fetchOne_args)that);
      return false;
    }

    /** With no fields, any two non-null instances are equal. */
    public boolean equals(fetchOne_args that) {
      if (that == null)
        return false;

      return true;
    }

    /** Constant hash (empty list), consistent with the all-equal equals above. */
    @Override
    public int hashCode() {
      List<Object> list = new ArrayList<Object>();

      return list.hashCode();
    }
+
+    @Override
+    public int compareTo(fetchOne_args other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+
+      return 0;
+    }
+
    /** Always null: this struct declares no fields. */
    public _Fields fieldForId(int fieldId) {
      return _Fields.findByThriftId(fieldId);
    }
+
    /** Deserializes via the scheme matching the protocol (standard or tuple). */
    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
    }

    /** Serializes via the scheme matching the protocol (standard or tuple). */
    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
    }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("fetchOne_args(");
+      boolean first = true;
+
+      sb.append(")");
+      return sb.toString();
+    }
+
    /** No fields, so nothing to validate. */
    public void validate() throws org.apache.thrift.TException {
      // check for required fields
      // check for sub-struct validity
    }

    /** Java serialization hook: encodes via the Thrift compact protocol. */
    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
      try {
        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }

    /** Java deserialization hook: decodes the compact encoding written above. */
    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
      try {
        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }
+
    /** Framework hook producing the field-tagged (standard) scheme for fetchOne_args. */
    private static class fetchOne_argsStandardSchemeFactory implements SchemeFactory {
      public fetchOne_argsStandardScheme getScheme() {
        return new fetchOne_argsStandardScheme();
      }
    }
+
+    private static class fetchOne_argsStandardScheme extends 
StandardScheme<fetchOne_args> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, 
fetchOne_args struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          swit

<TRUNCATED>

Reply via email to