http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/gen/java/org/apache/hive/service/cli/thrift/TUnionTypeEntry.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/gen/java/org/apache/hive/service/cli/thrift/TUnionTypeEntry.java b/sql/hive-thriftserver/src/gen/java/org/apache/hive/service/cli/thrift/TUnionTypeEntry.java
new file mode 100644
index 0000000..73dd45d
--- /dev/null
+++ b/sql/hive-thriftserver/src/gen/java/org/apache/hive/service/cli/thrift/TUnionTypeEntry.java
@@ -0,0 +1,448 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.0)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.hive.service.cli.thrift;
+
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TUnionTypeEntry implements org.apache.thrift.TBase<TUnionTypeEntry, TUnionTypeEntry._Fields>, java.io.Serializable, Cloneable {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TUnionTypeEntry");
+
+  private static final org.apache.thrift.protocol.TField NAME_TO_TYPE_PTR_FIELD_DESC = new org.apache.thrift.protocol.TField("nameToTypePtr", org.apache.thrift.protocol.TType.MAP, (short)1);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new TUnionTypeEntryStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new TUnionTypeEntryTupleSchemeFactory());
+  }
+
+  private Map<String,Integer> nameToTypePtr; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    NAME_TO_TYPE_PTR((short)1, "nameToTypePtr");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // NAME_TO_TYPE_PTR
+          return NAME_TO_TYPE_PTR;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.NAME_TO_TYPE_PTR, new org.apache.thrift.meta_data.FieldMetaData("nameToTypePtr", org.apache.thrift.TFieldRequirementType.REQUIRED,
+        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32, "TTypeEntryPtr"))));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TUnionTypeEntry.class, metaDataMap);
+  }
+
+  public TUnionTypeEntry() {
+  }
+
+  public TUnionTypeEntry(
+    Map<String,Integer> nameToTypePtr)
+  {
+    this();
+    this.nameToTypePtr = nameToTypePtr;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public TUnionTypeEntry(TUnionTypeEntry other) {
+    if (other.isSetNameToTypePtr()) {
+      Map<String,Integer> __this__nameToTypePtr = new HashMap<String,Integer>();
+      for (Map.Entry<String, Integer> other_element : other.nameToTypePtr.entrySet()) {
+
+        String other_element_key = other_element.getKey();
+        Integer other_element_value = other_element.getValue();
+
+        String __this__nameToTypePtr_copy_key = other_element_key;
+
+        Integer __this__nameToTypePtr_copy_value = other_element_value;
+
+        __this__nameToTypePtr.put(__this__nameToTypePtr_copy_key, __this__nameToTypePtr_copy_value);
+      }
+      this.nameToTypePtr = __this__nameToTypePtr;
+    }
+  }
+
+  public TUnionTypeEntry deepCopy() {
+    return new TUnionTypeEntry(this);
+  }
+
+  @Override
+  public void clear() {
+    this.nameToTypePtr = null;
+  }
+
+  public int getNameToTypePtrSize() {
+    return (this.nameToTypePtr == null) ? 0 : this.nameToTypePtr.size();
+  }
+
+  public void putToNameToTypePtr(String key, int val) {
+    if (this.nameToTypePtr == null) {
+      this.nameToTypePtr = new HashMap<String,Integer>();
+    }
+    this.nameToTypePtr.put(key, val);
+  }
+
+  public Map<String,Integer> getNameToTypePtr() {
+    return this.nameToTypePtr;
+  }
+
+  public void setNameToTypePtr(Map<String,Integer> nameToTypePtr) {
+    this.nameToTypePtr = nameToTypePtr;
+  }
+
+  public void unsetNameToTypePtr() {
+    this.nameToTypePtr = null;
+  }
+
+  /** Returns true if field nameToTypePtr is set (has been assigned a value) and false otherwise */
+  public boolean isSetNameToTypePtr() {
+    return this.nameToTypePtr != null;
+  }
+
+  public void setNameToTypePtrIsSet(boolean value) {
+    if (!value) {
+      this.nameToTypePtr = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case NAME_TO_TYPE_PTR:
+      if (value == null) {
+        unsetNameToTypePtr();
+      } else {
+        setNameToTypePtr((Map<String,Integer>)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case NAME_TO_TYPE_PTR:
+      return getNameToTypePtr();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case NAME_TO_TYPE_PTR:
+      return isSetNameToTypePtr();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof TUnionTypeEntry)
+      return this.equals((TUnionTypeEntry)that);
+    return false;
+  }
+
+  public boolean equals(TUnionTypeEntry that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_nameToTypePtr = true && this.isSetNameToTypePtr();
+    boolean that_present_nameToTypePtr = true && that.isSetNameToTypePtr();
+    if (this_present_nameToTypePtr || that_present_nameToTypePtr) {
+      if (!(this_present_nameToTypePtr && that_present_nameToTypePtr))
+        return false;
+      if (!this.nameToTypePtr.equals(that.nameToTypePtr))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    HashCodeBuilder builder = new HashCodeBuilder();
+
+    boolean present_nameToTypePtr = true && (isSetNameToTypePtr());
+    builder.append(present_nameToTypePtr);
+    if (present_nameToTypePtr)
+      builder.append(nameToTypePtr);
+
+    return builder.toHashCode();
+  }
+
+  public int compareTo(TUnionTypeEntry other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+    TUnionTypeEntry typedOther = (TUnionTypeEntry)other;
+
+    lastComparison = Boolean.valueOf(isSetNameToTypePtr()).compareTo(typedOther.isSetNameToTypePtr());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetNameToTypePtr()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.nameToTypePtr, typedOther.nameToTypePtr);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("TUnionTypeEntry(");
+    boolean first = true;
+
+    sb.append("nameToTypePtr:");
+    if (this.nameToTypePtr == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.nameToTypePtr);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    if (!isSetNameToTypePtr()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'nameToTypePtr' is unset! Struct:" + toString());
+    }
+
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class TUnionTypeEntryStandardSchemeFactory implements SchemeFactory {
+    public TUnionTypeEntryStandardScheme getScheme() {
+      return new TUnionTypeEntryStandardScheme();
+    }
+  }
+
+  private static class TUnionTypeEntryStandardScheme extends StandardScheme<TUnionTypeEntry> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, TUnionTypeEntry struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // NAME_TO_TYPE_PTR
+            if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+              {
+                org.apache.thrift.protocol.TMap _map20 = iprot.readMapBegin();
+                struct.nameToTypePtr = new HashMap<String,Integer>(2*_map20.size);
+                for (int _i21 = 0; _i21 < _map20.size; ++_i21)
+                {
+                  String _key22; // required
+                  int _val23; // required
+                  _key22 = iprot.readString();
+                  _val23 = iprot.readI32();
+                  struct.nameToTypePtr.put(_key22, _val23);
+                }
+                iprot.readMapEnd();
+              }
+              struct.setNameToTypePtrIsSet(true);
+            } else {
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, TUnionTypeEntry struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.nameToTypePtr != null) {
+        oprot.writeFieldBegin(NAME_TO_TYPE_PTR_FIELD_DESC);
+        {
+          oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.I32, struct.nameToTypePtr.size()));
+          for (Map.Entry<String, Integer> _iter24 : struct.nameToTypePtr.entrySet())
+          {
+            oprot.writeString(_iter24.getKey());
+            oprot.writeI32(_iter24.getValue());
+          }
+          oprot.writeMapEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class TUnionTypeEntryTupleSchemeFactory implements SchemeFactory {
+    public TUnionTypeEntryTupleScheme getScheme() {
+      return new TUnionTypeEntryTupleScheme();
+    }
+  }
+
+  private static class TUnionTypeEntryTupleScheme extends TupleScheme<TUnionTypeEntry> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, TUnionTypeEntry struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      {
+        oprot.writeI32(struct.nameToTypePtr.size());
+        for (Map.Entry<String, Integer> _iter25 : struct.nameToTypePtr.entrySet())
+        {
+          oprot.writeString(_iter25.getKey());
+          oprot.writeI32(_iter25.getValue());
+        }
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, TUnionTypeEntry struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      {
+        org.apache.thrift.protocol.TMap _map26 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.I32, iprot.readI32());
+        struct.nameToTypePtr = new HashMap<String,Integer>(2*_map26.size);
+        for (int _i27 = 0; _i27 < _map26.size; ++_i27)
+        {
+          String _key28; // required
+          int _val29; // required
+          _key28 = iprot.readString();
+          _val29 = iprot.readI32();
+          struct.nameToTypePtr.put(_key28, _val29);
+        }
+      }
+      struct.setNameToTypePtrIsSet(true);
+    }
+  }
+
+}
+
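
The generated struct above carries a single required map from union member names to type-entry pointers (indexes into the enclosing TTypeDesc's type list), serialized through either the standard or the tuple scheme. A minimal round-trip sketch, not part of the patch; it assumes only libthrift 0.9.0's TSerializer and TDeserializer helpers, and the class name is illustrative:

    import org.apache.thrift.TDeserializer;
    import org.apache.thrift.TSerializer;
    import org.apache.thrift.protocol.TBinaryProtocol;
    import org.apache.hive.service.cli.thrift.TUnionTypeEntry;

    public class TUnionTypeEntryRoundTrip {
      public static void main(String[] args) throws Exception {
        TUnionTypeEntry entry = new TUnionTypeEntry();
        entry.putToNameToTypePtr("left", 1);    // pointers into the surrounding TTypeDesc
        entry.putToNameToTypePtr("right", 2);
        entry.validate();                       // nameToTypePtr is REQUIRED

        // serialize and deserialize with the standard (binary) scheme
        byte[] bytes = new TSerializer(new TBinaryProtocol.Factory()).serialize(entry);
        TUnionTypeEntry decoded = new TUnionTypeEntry();
        new TDeserializer(new TBinaryProtocol.Factory()).deserialize(decoded, bytes);

        System.out.println(decoded);  // e.g. TUnionTypeEntry(nameToTypePtr:{left=1, right=2})
      }
    }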

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/gen/java/org/apache/hive/service/cli/thrift/TUserDefinedTypeEntry.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/gen/java/org/apache/hive/service/cli/thrift/TUserDefinedTypeEntry.java b/sql/hive-thriftserver/src/gen/java/org/apache/hive/service/cli/thrift/TUserDefinedTypeEntry.java
new file mode 100644
index 0000000..3a111a2
--- /dev/null
+++ b/sql/hive-thriftserver/src/gen/java/org/apache/hive/service/cli/thrift/TUserDefinedTypeEntry.java
@@ -0,0 +1,385 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.0)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.hive.service.cli.thrift;
+
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TUserDefinedTypeEntry implements org.apache.thrift.TBase<TUserDefinedTypeEntry, TUserDefinedTypeEntry._Fields>, java.io.Serializable, Cloneable {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TUserDefinedTypeEntry");
+
+  private static final org.apache.thrift.protocol.TField TYPE_CLASS_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("typeClassName", org.apache.thrift.protocol.TType.STRING, (short)1);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new TUserDefinedTypeEntryStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new TUserDefinedTypeEntryTupleSchemeFactory());
+  }
+
+  private String typeClassName; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    TYPE_CLASS_NAME((short)1, "typeClassName");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // TYPE_CLASS_NAME
+          return TYPE_CLASS_NAME;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + 
fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.TYPE_CLASS_NAME, new org.apache.thrift.meta_data.FieldMetaData("typeClassName", org.apache.thrift.TFieldRequirementType.REQUIRED,
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TUserDefinedTypeEntry.class, metaDataMap);
+  }
+
+  public TUserDefinedTypeEntry() {
+  }
+
+  public TUserDefinedTypeEntry(
+    String typeClassName)
+  {
+    this();
+    this.typeClassName = typeClassName;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public TUserDefinedTypeEntry(TUserDefinedTypeEntry other) {
+    if (other.isSetTypeClassName()) {
+      this.typeClassName = other.typeClassName;
+    }
+  }
+
+  public TUserDefinedTypeEntry deepCopy() {
+    return new TUserDefinedTypeEntry(this);
+  }
+
+  @Override
+  public void clear() {
+    this.typeClassName = null;
+  }
+
+  public String getTypeClassName() {
+    return this.typeClassName;
+  }
+
+  public void setTypeClassName(String typeClassName) {
+    this.typeClassName = typeClassName;
+  }
+
+  public void unsetTypeClassName() {
+    this.typeClassName = null;
+  }
+
+  /** Returns true if field typeClassName is set (has been assigned a value) and false otherwise */
+  public boolean isSetTypeClassName() {
+    return this.typeClassName != null;
+  }
+
+  public void setTypeClassNameIsSet(boolean value) {
+    if (!value) {
+      this.typeClassName = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case TYPE_CLASS_NAME:
+      if (value == null) {
+        unsetTypeClassName();
+      } else {
+        setTypeClassName((String)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case TYPE_CLASS_NAME:
+      return getTypeClassName();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case TYPE_CLASS_NAME:
+      return isSetTypeClassName();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof TUserDefinedTypeEntry)
+      return this.equals((TUserDefinedTypeEntry)that);
+    return false;
+  }
+
+  public boolean equals(TUserDefinedTypeEntry that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_typeClassName = true && this.isSetTypeClassName();
+    boolean that_present_typeClassName = true && that.isSetTypeClassName();
+    if (this_present_typeClassName || that_present_typeClassName) {
+      if (!(this_present_typeClassName && that_present_typeClassName))
+        return false;
+      if (!this.typeClassName.equals(that.typeClassName))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    HashCodeBuilder builder = new HashCodeBuilder();
+
+    boolean present_typeClassName = true && (isSetTypeClassName());
+    builder.append(present_typeClassName);
+    if (present_typeClassName)
+      builder.append(typeClassName);
+
+    return builder.toHashCode();
+  }
+
+  public int compareTo(TUserDefinedTypeEntry other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+    TUserDefinedTypeEntry typedOther = (TUserDefinedTypeEntry)other;
+
+    lastComparison = Boolean.valueOf(isSetTypeClassName()).compareTo(typedOther.isSetTypeClassName());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetTypeClassName()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.typeClassName, typedOther.typeClassName);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("TUserDefinedTypeEntry(");
+    boolean first = true;
+
+    sb.append("typeClassName:");
+    if (this.typeClassName == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.typeClassName);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    if (!isSetTypeClassName()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'typeClassName' is unset! Struct:" + toString());
+    }
+
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class TUserDefinedTypeEntryStandardSchemeFactory implements SchemeFactory {
+    public TUserDefinedTypeEntryStandardScheme getScheme() {
+      return new TUserDefinedTypeEntryStandardScheme();
+    }
+  }
+
+  private static class TUserDefinedTypeEntryStandardScheme extends StandardScheme<TUserDefinedTypeEntry> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, TUserDefinedTypeEntry struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // TYPE_CLASS_NAME
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.typeClassName = iprot.readString();
+              struct.setTypeClassNameIsSet(true);
+            } else {
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, TUserDefinedTypeEntry struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.typeClassName != null) {
+        oprot.writeFieldBegin(TYPE_CLASS_NAME_FIELD_DESC);
+        oprot.writeString(struct.typeClassName);
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class TUserDefinedTypeEntryTupleSchemeFactory implements SchemeFactory {
+    public TUserDefinedTypeEntryTupleScheme getScheme() {
+      return new TUserDefinedTypeEntryTupleScheme();
+    }
+  }
+
+  private static class TUserDefinedTypeEntryTupleScheme extends TupleScheme<TUserDefinedTypeEntry> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, TUserDefinedTypeEntry struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      oprot.writeString(struct.typeClassName);
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, TUserDefinedTypeEntry struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      struct.typeClassName = iprot.readString();
+      struct.setTypeClassNameIsSet(true);
+    }
+  }
+
+}
+
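
Because the generated writeObject/readObject above delegate to Thrift's compact protocol, plain Java serialization round-trips the struct as well. A small sketch (the type class name is illustrative):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.ObjectInputStream;
    import java.io.ObjectOutputStream;
    import org.apache.hive.service.cli.thrift.TUserDefinedTypeEntry;

    public class TUserDefinedTypeEntrySerDe {
      public static void main(String[] args) throws Exception {
        TUserDefinedTypeEntry entry = new TUserDefinedTypeEntry("com.example.MyType");

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(bos);
        oos.writeObject(entry);                 // runs the private writeObject hook
        oos.close();

        ObjectInputStream ois =
            new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()));
        TUserDefinedTypeEntry copy = (TUserDefinedTypeEntry) ois.readObject();
        System.out.println(copy.getTypeClassName());   // com.example.MyType
      }
    }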

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/AbstractService.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/AbstractService.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/AbstractService.java
new file mode 100644
index 0000000..c2a2b2d
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/AbstractService.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ * AbstractService.
+ *
+ */
+public abstract class AbstractService implements Service {
+
+  private static final Log LOG = LogFactory.getLog(AbstractService.class);
+
+  /**
+   * Service state: initially {@link STATE#NOTINITED}.
+   */
+  private STATE state = STATE.NOTINITED;
+
+  /**
+   * Service name.
+   */
+  private final String name;
+  /**
+   * Service start time. Will be zero until the service is started.
+   */
+  private long startTime;
+
+  /**
+   * The configuration. Will be null until the service is initialized.
+   */
+  private HiveConf hiveConf;
+
+  /**
+   * List of state change listeners; it is final to ensure
+   * that it will never be null.
+   */
+  private final List<ServiceStateChangeListener> listeners =
+      new ArrayList<ServiceStateChangeListener>();
+
+  /**
+   * Construct the service.
+   *
+   * @param name
+   *          service name
+   */
+  public AbstractService(String name) {
+    this.name = name;
+  }
+
+  @Override
+  public synchronized STATE getServiceState() {
+    return state;
+  }
+
+  /**
+   * {@inheritDoc}
+   *
+   * @throws IllegalStateException
+   *           if the current service state does not permit
+   *           this action
+   */
+  @Override
+  public synchronized void init(HiveConf hiveConf) {
+    ensureCurrentState(STATE.NOTINITED);
+    this.hiveConf = hiveConf;
+    changeState(STATE.INITED);
+    LOG.info("Service:" + getName() + " is inited.");
+  }
+
+  /**
+   * {@inheritDoc}
+   *
+   * @throws IllegalStateException
+   *           if the current service state does not permit
+   *           this action
+   */
+  @Override
+  public synchronized void start() {
+    startTime = System.currentTimeMillis();
+    ensureCurrentState(STATE.INITED);
+    changeState(STATE.STARTED);
+    LOG.info("Service:" + getName() + " is started.");
+  }
+
+  /**
+   * {@inheritDoc}
+   *
+   * @throws IllegalStateException
+   *           if the current service state does not permit
+   *           this action
+   */
+  @Override
+  public synchronized void stop() {
+    if (state == STATE.STOPPED ||
+        state == STATE.INITED ||
+        state == STATE.NOTINITED) {
+      // already stopped, or else it was never
+      // started (e.g. another service failing canceled startup)
+      return;
+    }
+    ensureCurrentState(STATE.STARTED);
+    changeState(STATE.STOPPED);
+    LOG.info("Service:" + getName() + " is stopped.");
+  }
+
+  @Override
+  public synchronized void register(ServiceStateChangeListener l) {
+    listeners.add(l);
+  }
+
+  @Override
+  public synchronized void unregister(ServiceStateChangeListener l) {
+    listeners.remove(l);
+  }
+
+  @Override
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public synchronized HiveConf getHiveConf() {
+    return hiveConf;
+  }
+
+  @Override
+  public long getStartTime() {
+    return startTime;
+  }
+
+  /**
+   * Verify that a service is in a given state.
+   *
+   * @param currentState
+   *          the desired state
+   * @throws IllegalStateException
+   *           if the service state is different from
+   *           the desired state
+   */
+  private void ensureCurrentState(STATE currentState) {
+    ServiceOperations.ensureCurrentState(state, currentState);
+  }
+
+  /**
+   * Change to a new state and notify all listeners.
+   * This is a private method that is only invoked from synchronized methods,
+   * which avoid having to clone the listener list. It does imply that
+   * the state change listener methods should be short lived, as they
+   * will delay the state transition.
+   *
+   * @param newState
+   *          new service state
+   */
+  private void changeState(STATE newState) {
+    state = newState;
+    // notify listeners
+    for (ServiceStateChangeListener l : listeners) {
+      l.stateChanged(this);
+    }
+  }
+
+}
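
A hypothetical subclass (illustration only, not in the patch) shows the intended use of AbstractService: override the lifecycle hooks, do the real work, and delegate to super so the state machine and listener notifications stay correct:

    import org.apache.hadoop.hive.conf.HiveConf;

    public class EchoService extends AbstractService {
      public EchoService() {
        super("EchoService");
      }

      @Override
      public synchronized void init(HiveConf hiveConf) {
        // set up anything that only needs configuration here
        super.init(hiveConf);   // NOTINITED -> INITED, notifies listeners
      }

      @Override
      public synchronized void start() {
        // start threads, open sockets, ...
        super.start();          // INITED -> STARTED
      }

      @Override
      public synchronized void stop() {
        // release resources here
        super.stop();           // STARTED -> STOPPED (no-op if never started)
      }
    }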

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/BreakableService.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/BreakableService.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/BreakableService.java
new file mode 100644
index 0000000..9c44beb
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/BreakableService.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.service.Service.STATE;
+
+/**
+ * This is a service that can be configured to break on any of the lifecycle
+ * events, so as to test the failure handling of other parts of the service
+ * infrastructure.
+ *
+ * It keeps a counter of the number of times each entry point is called -
+ * these counters are incremented before the exceptions are raised and
+ * before the superclass state methods are invoked.
+ *
+ */
+public class BreakableService extends AbstractService {
+  private boolean failOnInit;
+  private boolean failOnStart;
+  private boolean failOnStop;
+  private final int[] counts = new int[4];
+
+  public BreakableService() {
+    this(false, false, false);
+  }
+
+  public BreakableService(boolean failOnInit,
+                          boolean failOnStart,
+                          boolean failOnStop) {
+    super("BreakableService");
+    this.failOnInit = failOnInit;
+    this.failOnStart = failOnStart;
+    this.failOnStop = failOnStop;
+    inc(STATE.NOTINITED);
+  }
+
+  private int convert(STATE state) {
+    switch (state) {
+      case NOTINITED: return 0;
+      case INITED:    return 1;
+      case STARTED:   return 2;
+      case STOPPED:   return 3;
+      default:        return 0;
+    }
+  }
+
+  private void inc(STATE state) {
+    int index = convert(state);
+    counts[index] ++;
+  }
+
+  public int getCount(STATE state) {
+    return counts[convert(state)];
+  }
+
+  private void maybeFail(boolean fail, String action) {
+    if (fail) {
+      throw new BrokenLifecycleEvent(action);
+    }
+  }
+
+  @Override
+  public void init(HiveConf conf) {
+    inc(STATE.INITED);
+    maybeFail(failOnInit, "init");
+    super.init(conf);
+  }
+
+  @Override
+  public void start() {
+    inc(STATE.STARTED);
+    maybeFail(failOnStart, "start");
+    super.start();
+  }
+
+  @Override
+  public void stop() {
+    inc(STATE.STOPPED);
+    maybeFail(failOnStop, "stop");
+    super.stop();
+  }
+
+  public void setFailOnInit(boolean failOnInit) {
+    this.failOnInit = failOnInit;
+  }
+
+  public void setFailOnStart(boolean failOnStart) {
+    this.failOnStart = failOnStart;
+  }
+
+  public void setFailOnStop(boolean failOnStop) {
+    this.failOnStop = failOnStop;
+  }
+
+  /**
+   * The exception explicitly raised on a failure
+   */
+  public static class BrokenLifecycleEvent extends RuntimeException {
+    BrokenLifecycleEvent(String action) {
+      super("Lifecycle Failure during " + action);
+    }
+  }
+
+}
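
A sketch of how a test might drive BreakableService (assuming JUnit 4 on the classpath): a failure injected into start() leaves the service INITED, with the STARTED counter already bumped, because counters are incremented before the exception is raised:

    import static org.junit.Assert.*;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.service.BreakableService;
    import org.apache.hive.service.Service;
    import org.junit.Test;

    public class BreakableServiceSketch {
      @Test
      public void startFailureLeavesServiceInited() {
        BreakableService svc = new BreakableService(false, true, false); // fail on start()
        svc.init(new HiveConf());
        try {
          svc.start();
          fail("start() should have thrown");
        } catch (BreakableService.BrokenLifecycleEvent expected) {
          assertEquals(1, svc.getCount(Service.STATE.STARTED));
          assertEquals(Service.STATE.INITED, svc.getServiceState());
        }
      }
    }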

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CompositeService.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CompositeService.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CompositeService.java
new file mode 100644
index 0000000..8979118
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CompositeService.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ * CompositeService.
+ *
+ */
+public class CompositeService extends AbstractService {
+
+  private static final Log LOG = LogFactory.getLog(CompositeService.class);
+
+  private final List<Service> serviceList = new ArrayList<Service>();
+
+  public CompositeService(String name) {
+    super(name);
+  }
+
+  public Collection<Service> getServices() {
+    return Collections.unmodifiableList(serviceList);
+  }
+
+  protected synchronized void addService(Service service) {
+    serviceList.add(service);
+  }
+
+  protected synchronized boolean removeService(Service service) {
+    return serviceList.remove(service);
+  }
+
+  @Override
+  public synchronized void init(HiveConf hiveConf) {
+    for (Service service : serviceList) {
+      service.init(hiveConf);
+    }
+    super.init(hiveConf);
+  }
+
+  @Override
+  public synchronized void start() {
+    int i = 0;
+    try {
+      for (int n = serviceList.size(); i < n; i++) {
+        Service service = serviceList.get(i);
+        service.start();
+      }
+      super.start();
+    } catch (Throwable e) {
+      LOG.error("Error starting services " + getName(), e);
+      // Note that the state of the failed service is still INITED and not
+      // STARTED. Even though the last service is not started completely, still
+      // call stop() on all services including the failed service to make sure cleanup
+      // happens.
+      stop(i);
+      throw new ServiceException("Failed to Start " + getName(), e);
+    }
+
+  }
+
+  @Override
+  public synchronized void stop() {
+    if (this.getServiceState() == STATE.STOPPED) {
+      // The base composite-service is already stopped, don't do anything again.
+      return;
+    }
+    if (serviceList.size() > 0) {
+      stop(serviceList.size() - 1);
+    }
+    super.stop();
+  }
+
+  private synchronized void stop(int numOfServicesStarted) {
+    // stop in reverse order of start
+    for (int i = numOfServicesStarted; i >= 0; i--) {
+      Service service = serviceList.get(i);
+      try {
+        service.stop();
+      } catch (Throwable t) {
+        LOG.info("Error stopping " + service.getName(), t);
+      }
+    }
+  }
+
+  /**
+   * JVM Shutdown hook for CompositeService which will stop the given
+   * CompositeService gracefully in case of JVM shutdown.
+   */
+  public static class CompositeServiceShutdownHook implements Runnable {
+
+    private final CompositeService compositeService;
+
+    public CompositeServiceShutdownHook(CompositeService compositeService) {
+      this.compositeService = compositeService;
+    }
+
+    @Override
+    public void run() {
+      try {
+        // Stop the Composite Service
+        compositeService.stop();
+      } catch (Throwable t) {
+        LOG.info("Error stopping " + compositeService.getName(), t);
+      }
+    }
+  }
+
+
+}
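
Putting the pieces together (illustrative only; EchoService is the hypothetical subclass sketched under AbstractService above): children are initialized and started in registration order, stopped in reverse order, and the shutdown hook gives a clean stop on JVM exit:

    CompositeService server = new CompositeService("MyServer") {
      {
        addService(new EchoService());      // addService is protected, hence the subclass
        addService(new BreakableService());
      }
    };
    server.init(new HiveConf());            // inits every child, then the composite itself
    server.start();                         // on failure, already-started children are stopped
    Runtime.getRuntime().addShutdownHook(
        new Thread(new CompositeService.CompositeServiceShutdownHook(server)));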

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CookieSigner.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CookieSigner.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CookieSigner.java
new file mode 100644
index 0000000..ee51c24
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/CookieSigner.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
+
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+
+/**
+ * The cookie signer generates a signature based on SHA digest
+ * and appends it to the cookie value generated at the
+ * server side. It uses SHA digest algorithm to sign and verify signatures.
+ */
+public class CookieSigner {
+  private static final String SIGNATURE = "&s=";
+  private static final String SHA_STRING = "SHA";
+  private byte[] secretBytes;
+  private static final Log LOG = LogFactory.getLog(CookieSigner.class);
+
+  /**
+   * Constructor
+   * @param secret Secret Bytes
+   */
+  public CookieSigner(byte[] secret) {
+    if (secret == null) {
+      throw new IllegalArgumentException(" NULL Secret Bytes");
+    }
+    this.secretBytes = secret.clone();
+  }
+
+  /**
+   * Sign the cookie given the string token as input.
+   * @param str Input token
+   * @return Signed token that can be used to create a cookie
+   */
+  public String signCookie(String str) {
+    if (str == null || str.isEmpty()) {
+      throw new IllegalArgumentException("NULL or empty string to sign");
+    }
+    String signature = getSignature(str);
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Signature generated for " + str + " is " + signature);
+    }
+    return str + SIGNATURE + signature;
+  }
+
+  /**
+   * Verify a signed string and extract the original string.
+   * @param signedStr The already signed string
+   * @return Raw Value of the string without the signature
+   */
+  public String verifyAndExtract(String signedStr) {
+    int index = signedStr.lastIndexOf(SIGNATURE);
+    if (index == -1) {
+      throw new IllegalArgumentException("Invalid input sign: " + signedStr);
+    }
+    String originalSignature = signedStr.substring(index + SIGNATURE.length());
+    String rawValue = signedStr.substring(0, index);
+    String currentSignature = getSignature(rawValue);
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Signature generated for " + rawValue + " inside verify is " + 
currentSignature);
+    }
+    if (!originalSignature.equals(currentSignature)) {
+      throw new IllegalArgumentException("Invalid sign, original = " + 
originalSignature +
+        " current = " + currentSignature);
+    }
+    return rawValue;
+  }
+
+  /**
+   * Get the signature of the input string based on SHA digest algorithm.
+   * @param str Input token
+   * @return Signed String
+   */
+  private String getSignature(String str) {
+    try {
+      MessageDigest md = MessageDigest.getInstance(SHA_STRING);
+      md.update(str.getBytes());
+      md.update(secretBytes);
+      byte[] digest = md.digest();
+      return new Base64(0).encodeToString(digest);
+    } catch (NoSuchAlgorithmException ex) {
+      throw new RuntimeException("Invalid SHA digest String: " + SHA_STRING +
+        " " + ex.getMessage(), ex);
+    }
+  }
+}
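
The signed cookie is simply "<raw value>&s=<Base64(SHA(raw value || secret))>", and verifyAndExtract() recomputes the digest and compares before handing the raw value back. A usage sketch (the secret and cookie value are illustrative):

    CookieSigner signer = new CookieSigner("server-side-secret".getBytes());

    String signed = signer.signCookie("hive.server2.session=42");
    // e.g. "hive.server2.session=42&s=<base64 digest>"

    String raw = signer.verifyAndExtract(signed);   // returns "hive.server2.session=42"
    // Tampering with either part makes verifyAndExtract() throw IllegalArgumentException.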

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/FilterService.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/FilterService.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/FilterService.java
new file mode 100644
index 0000000..5a50874
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/FilterService.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ * FilterService.
+ *
+ */
+public class FilterService implements Service {
+
+
+  private final Service service;
+  private final long startTime = System.currentTimeMillis();
+
+  public FilterService(Service service) {
+    this.service = service;
+  }
+
+  @Override
+  public void init(HiveConf config) {
+    service.init(config);
+  }
+
+  @Override
+  public void start() {
+    service.start();
+  }
+
+  @Override
+  public void stop() {
+    service.stop();
+  }
+
+
+  @Override
+  public void register(ServiceStateChangeListener listener) {
+    service.register(listener);
+  }
+
+  @Override
+  public void unregister(ServiceStateChangeListener listener) {
+    service.unregister(listener);
+  }
+
+  @Override
+  public String getName() {
+    return service.getName();
+  }
+
+  @Override
+  public HiveConf getHiveConf() {
+    return service.getHiveConf();
+  }
+
+  @Override
+  public STATE getServiceState() {
+    return service.getServiceState();
+  }
+
+  @Override
+  public long getStartTime() {
+    return startTime;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/Service.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/Service.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/Service.java
new file mode 100644
index 0000000..2111837
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/Service.java
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ * Service.
+ *
+ */
+public interface Service {
+
+  /**
+   * Service states
+   */
+  public enum STATE {
+    /** Constructed but not initialized */
+    NOTINITED,
+
+    /** Initialized but not started or stopped */
+    INITED,
+
+    /** started and not stopped */
+    STARTED,
+
+    /** stopped. No further state transitions are permitted */
+    STOPPED
+  }
+
+  /**
+   * Initialize the service.
+   *
+   * The transition must be from {@link STATE#NOTINITED} to {@link STATE#INITED} unless the
+   * operation failed and an exception was raised.
+   *
+   * @param conf
+   *          the configuration of the service
+   */
+  void init(HiveConf conf);
+
+
+  /**
+   * Start the service.
+   *
+   * The transition should be from {@link STATE#INITED} to {@link STATE#STARTED} unless the
+   * operation failed and an exception was raised.
+   */
+  void start();
+
+  /**
+   * Stop the service.
+   *
+   * This operation must be designed to complete regardless of the initial state
+   * of the service, including the state of all its internal fields.
+   */
+  void stop();
+
+  /**
+   * Register a listener for service state change events.
+   *
+   * @param listener
+   *          a new listener
+   */
+  void register(ServiceStateChangeListener listener);
+
+  /**
+   * Unregister a previously registered listener for service state change events.
+   *
+   * @param listener
+   *          the listener to unregister.
+   */
+  void unregister(ServiceStateChangeListener listener);
+
+  /**
+   * Get the name of this service.
+   *
+   * @return the service name
+   */
+  String getName();
+
+  /**
+   * Get the configuration of this service.
+   * This is normally not a clone and may be manipulated, though there are no
+   * guarantees as to what the consequences of such actions may be
+   *
+   * @return the current configuration, unless a specific implementation chooses
+   *         otherwise.
+   */
+  HiveConf getHiveConf();
+
+  /**
+   * Get the current service state
+   *
+   * @return the state of the service
+   */
+  STATE getServiceState();
+
+  /**
+   * Get the service start time
+   *
+   * @return the start time of the service. This will be zero if the service
+   *         has not yet been started.
+   */
+  long getStartTime();
+
+}
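
A listener sketch for the register/unregister contract (this assumes ServiceStateChangeListener, referenced above but defined elsewhere in the patch, declares a single stateChanged(Service) callback). Listeners run synchronously inside the state transition, so they should return quickly:

    ServiceStateChangeListener auditor = new ServiceStateChangeListener() {
      @Override
      public void stateChanged(Service service) {
        System.out.println(service.getName() + " -> " + service.getServiceState());
      }
    };
    someService.register(auditor);   // 'someService' is any Service implementation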

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceException.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceException.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceException.java
new file mode 100644
index 0000000..3622cf8
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceException.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+/**
+ * ServiceException.
+ *
+ */
+public class ServiceException extends RuntimeException {
+
+  public ServiceException(Throwable cause) {
+    super(cause);
+  }
+
+  public ServiceException(String message) {
+    super(message);
+  }
+
+  public ServiceException(String message, Throwable cause) {
+    super(message, cause);
+  }
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceOperations.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceOperations.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceOperations.java
new file mode 100644
index 0000000..8946219
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceOperations.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ * ServiceOperations.
+ *
+ */
+public final class ServiceOperations {
+  private static final Log LOG = LogFactory.getLog(ServiceOperations.class);
+
+  private ServiceOperations() {
+  }
+
+  /**
+   * Verify that a service is in a given state.
+   * @param state the actual state a service is in
+   * @param expectedState the desired state
+   * @throws IllegalStateException if the service state is different from
+   * the desired state
+   */
+  public static void ensureCurrentState(Service.STATE state,
+                                        Service.STATE expectedState) {
+    if (state != expectedState) {
+      throw new IllegalStateException("For this operation, the " +
+                                          "current service state must be "
+                                          + expectedState
+                                          + " instead of " + state);
+    }
+  }
+
+  /**
+   * Initialize a service.
+   * <p/>
+   * The service state is checked <i>before</i> the operation begins.
+   * This process is <i>not</i> thread safe.
+   * @param service a service that must be in the state
+   *   {@link Service.STATE#NOTINITED}
+   * @param configuration the configuration to initialize the service with
+   * @throws RuntimeException on a state change failure
+   * @throws IllegalStateException if the service is in the wrong state
+   */
+  public static void init(Service service, HiveConf configuration) {
+    Service.STATE state = service.getServiceState();
+    ensureCurrentState(state, Service.STATE.NOTINITED);
+    service.init(configuration);
+  }
+
+  /**
+   * Start a service.
+   * <p/>
+   * The service state is checked <i>before</i> the operation begins.
+   * This process is <i>not</i> thread safe.
+   * @param service a service that must be in the state
+   *   {@link Service.STATE#INITED}
+   * @throws RuntimeException on a state change failure
+   * @throws IllegalStateException if the service is in the wrong state
+   */
+  public static void start(Service service) {
+    Service.STATE state = service.getServiceState();
+    ensureCurrentState(state, Service.STATE.INITED);
+    service.start();
+  }
+
+  /**
+   * Initialize then start a service.
+   * <p/>
+   * The service state is checked <i>before</i> the operation begins.
+   * This process is <i>not</i> thread safe.
+   * @param service a service that must be in the state
+   *   {@link Service.STATE#NOTINITED}
+   * @param configuration the configuration to initialize the service with
+   * @throws RuntimeException on a state change failure
+   * @throws IllegalStateException if the service is in the wrong state
+   */
+  public static void deploy(Service service, HiveConf configuration) {
+    init(service, configuration);
+    start(service);
+  }
+
+  /**
+   * Stop a service.
+   * <p/>Do nothing if the service is null or not
+   * in a state in which it can be/needs to be stopped.
+   * <p/>
+   * The service state is checked <i>before</i> the operation begins.
+   * This process is <i>not</i> thread safe.
+   * @param service a service or null
+   */
+  public static void stop(Service service) {
+    if (service != null) {
+      Service.STATE state = service.getServiceState();
+      if (state == Service.STATE.STARTED) {
+        service.stop();
+      }
+    }
+  }
+
+  /**
+   * Stop a service; if it is null, do nothing. Exceptions (but not
+   * Throwables) are caught and logged at warn level. This operation is
+   * intended to be used in cleanup operations.
+   *
+   * @param service a service; may be null
+   * @return any exception that was caught; null if none was.
+   */
+  public static Exception stopQuietly(Service service) {
+    try {
+      stop(service);
+    } catch (Exception e) {
+      LOG.warn("When stopping the service " + service.getName()
+                   + " : " + e,
+               e);
+      return e;
+    }
+    return null;
+  }
+
+}
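
Taken together, these helpers give a compact lifecycle idiom. A hedged sketch, assuming some concrete Service implementation obtained from a hypothetical createService() factory:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.service.Service;
    import org.apache.hive.service.ServiceOperations;

    public class LifecycleExample {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        Service service = createService(); // hypothetical factory for a concrete Service

        // deploy() is init() followed by start(), with a state check before each.
        ServiceOperations.deploy(service, conf);
        try {
          // ... interact with the running service ...
        } finally {
          // stopQuietly() logs failures instead of throwing; suited to cleanup.
          Exception failure = ServiceOperations.stopQuietly(service);
          if (failure != null) {
            System.err.println("Service did not stop cleanly: " + failure);
          }
        }
      }

      private static Service createService() {
        throw new UnsupportedOperationException("stand-in for a concrete Service implementation");
      }
    }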

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceStateChangeListener.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceStateChangeListener.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceStateChangeListener.java
new file mode 100644
index 0000000..16ad9a9
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceStateChangeListener.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service;
+
+/**
+ * ServiceStateChangeListener.
+ *
+ */
+public interface ServiceStateChangeListener {
+
+  /**
+   * Callback to notify of a state change. The service will already
+   * have changed state before this callback is invoked.
+   *
+   * This operation is invoked on the thread that initiated the state change,
+   * while the service itself is in a synchronized section.
+   * <ol>
+   *   <li>Any long-lived operation here will prevent the service state
+   *   change from completing in a timely manner.</li>
+   *   <li>If another thread is somehow invoked from the listener, and
+   *   that thread invokes the methods of the service (including
+   *   subclass-specific methods), there is a risk of a deadlock.</li>
+   * </ol>
+   *
+   * @param service the service that has changed.
+   */
+  void stateChanged(Service service);
+
+}
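
Given the deadlock caveats above, a listener should do as little as possible. A minimal sketch that only logs the transition:

    import org.apache.hive.service.Service;
    import org.apache.hive.service.ServiceStateChangeListener;

    // Runs on the state-changing thread while the service holds its own lock,
    // so the body stays short and never calls back into the service.
    public class LoggingStateListener implements ServiceStateChangeListener {
      @Override
      public void stateChanged(Service service) {
        System.out.println(service.getName() + " moved to " + service.getServiceState());
      }
    }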

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceUtils.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceUtils.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceUtils.java
new file mode 100644
index 0000000..e712aaf
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/ServiceUtils.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service;
+
+public class ServiceUtils {
+
+  /**
+   * Get the index separating the user name from the domain name (the user's
+   * name up to the first '/' or '@').
+   *
+   * @param userName full user name.
+   * @return index of domain match or -1 if not found
+   */
+  public static int indexOfDomainMatch(String userName) {
+    if (userName == null) {
+      return -1;
+    }
+
+    int idx = userName.indexOf('/');
+    int idx2 = userName.indexOf('@');
+    int endIdx = Math.min(idx, idx2); // Use the earlier match,
+    // unless at least one of '/' or '@' was not found, in
+    // which case, use the later match.
+    if (endIdx == -1) {
+      endIdx = Math.max(idx, idx2);
+    }
+    return endIdx;
+  }
+}
\ No newline at end of file
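
For example, applied to a Kerberos-style principal, indexOfDomainMatch returns the position of the first separator, which callers can use to extract the short name:

    import org.apache.hive.service.ServiceUtils;

    public class DomainMatchExample {
      public static void main(String[] args) {
        String principal = "hive/host.example.com@EXAMPLE.COM";
        int idx = ServiceUtils.indexOfDomainMatch(principal);
        // '/' occurs before '@', so the earlier match wins and idx is 4.
        String shortName = idx == -1 ? principal : principal.substring(0, idx);
        System.out.println(shortName); // prints "hive"
      }
    }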

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
new file mode 100644
index 0000000..c8f93ff
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.service.auth;
+
+import javax.security.sasl.AuthenticationException;
+
+/**
+ * This authentication provider allows any combination of username and password.
+ */
+public class AnonymousAuthenticationProviderImpl implements PasswdAuthenticationProvider {
+
+  @Override
+  public void Authenticate(String user, String password) throws AuthenticationException {
+    // no-op authentication
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
new file mode 100644
index 0000000..4b95503
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import javax.security.sasl.AuthenticationException;
+
+/**
+ * This class helps select a {@link PasswdAuthenticationProvider} for a given {@code AuthMethod}.
+ */
+public final class AuthenticationProviderFactory {
+
+  public enum AuthMethods {
+    LDAP("LDAP"),
+    PAM("PAM"),
+    CUSTOM("CUSTOM"),
+    NONE("NONE");
+
+    private final String authMethod;
+
+    AuthMethods(String authMethod) {
+      this.authMethod = authMethod;
+    }
+
+    public String getAuthMethod() {
+      return authMethod;
+    }
+
+    public static AuthMethods getValidAuthMethod(String authMethodStr)
+      throws AuthenticationException {
+      for (AuthMethods auth : AuthMethods.values()) {
+        if (authMethodStr.equals(auth.getAuthMethod())) {
+          return auth;
+        }
+      }
+      throw new AuthenticationException("Not a valid authentication method: " + authMethodStr);
+    }
+  }
+
+  private AuthenticationProviderFactory() {
+  }
+
+  public static PasswdAuthenticationProvider getAuthenticationProvider(AuthMethods authMethod)
+    throws AuthenticationException {
+    if (authMethod == AuthMethods.LDAP) {
+      return new LdapAuthenticationProviderImpl();
+    } else if (authMethod == AuthMethods.PAM) {
+      return new PamAuthenticationProviderImpl();
+    } else if (authMethod == AuthMethods.CUSTOM) {
+      return new CustomAuthenticationProviderImpl();
+    } else if (authMethod == AuthMethods.NONE) {
+      return new AnonymousAuthenticationProviderImpl();
+    } else {
+      throw new AuthenticationException("Unsupported authentication method");
+    }
+  }
+}
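
Typical use is to translate the configured string into an AuthMethods constant and then ask the factory for the matching provider. A minimal sketch using the NONE method, which maps to the anonymous provider above:

    import javax.security.sasl.AuthenticationException;

    import org.apache.hive.service.auth.AuthenticationProviderFactory;
    import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
    import org.apache.hive.service.auth.PasswdAuthenticationProvider;

    public class AuthFactoryExample {
      public static void main(String[] args) throws AuthenticationException {
        // Map the configured string onto an enum constant, then get a provider.
        AuthMethods method = AuthMethods.getValidAuthMethod("NONE");
        PasswdAuthenticationProvider provider =
            AuthenticationProviderFactory.getAuthenticationProvider(method);
        provider.Authenticate("anyUser", "anyPassword"); // NONE accepts anything
      }
    }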

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
new file mode 100644
index 0000000..3dc0aa8
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import javax.security.sasl.AuthenticationException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * This authentication provider implements the {@code CUSTOM} authentication. It allows a
+ * {@link PasswdAuthenticationProvider} to be specified at configuration time which may
+ * additionally implement {@link org.apache.hadoop.conf.Configurable Configurable} to grab
+ * Hive's {@link org.apache.hadoop.conf.Configuration Configuration}.
+ */
+public class CustomAuthenticationProviderImpl implements PasswdAuthenticationProvider {
+
+  private final PasswdAuthenticationProvider customProvider;
+
+  @SuppressWarnings("unchecked")
+  CustomAuthenticationProviderImpl() {
+    HiveConf conf = new HiveConf();
+    Class<? extends PasswdAuthenticationProvider> customHandlerClass =
+      (Class<? extends PasswdAuthenticationProvider>) conf.getClass(
+        HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS.varname,
+        PasswdAuthenticationProvider.class);
+    customProvider = ReflectionUtils.newInstance(customHandlerClass, conf);
+  }
+
+  @Override
+  public void Authenticate(String user, String password) throws AuthenticationException {
+    customProvider.Authenticate(user, password);
+  }
+
+}
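
A deployment supplies its own PasswdAuthenticationProvider and points the HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS property referenced above at it, with the authentication type set to CUSTOM. A hedged sketch of such a provider; the hard-coded credential check is purely illustrative:

    import javax.security.sasl.AuthenticationException;

    import org.apache.hive.service.auth.PasswdAuthenticationProvider;

    // A toy provider a deployment might plug in via the CUSTOM auth type.
    // Real implementations would consult an external credential store.
    public class StaticPasswordProvider implements PasswdAuthenticationProvider {
      @Override
      public void Authenticate(String user, String password) throws AuthenticationException {
        if (!"secret".equals(password)) {
          throw new AuthenticationException("Invalid credentials for " + user);
        }
      }
    }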

http://git-wip-us.apache.org/repos/asf/spark/blob/7feeb82c/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java
new file mode 100644
index 0000000..1e6ac4f
--- /dev/null
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -0,0 +1,364 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.net.ssl.SSLServerSocket;
+import javax.security.auth.login.LoginException;
+import javax.security.sasl.Sasl;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.thrift.DBTokenStore;
+import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.thrift.ThriftCLIService;
+import org.apache.thrift.TProcessorFactory;
+import org.apache.thrift.transport.TSSLTransportFactory;
+import org.apache.thrift.transport.TServerSocket;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+import org.apache.thrift.transport.TTransportFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class helps in some aspects of authentication. It creates the proper Thrift classes for
+ * the given configuration as well as helps with authenticating requests.
+ */
+public class HiveAuthFactory {
+  private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);
+
+  public enum AuthTypes {
+    NOSASL("NOSASL"),
+    NONE("NONE"),
+    LDAP("LDAP"),
+    KERBEROS("KERBEROS"),
+    CUSTOM("CUSTOM"),
+    PAM("PAM");
+
+    private final String authType;
+
+    AuthTypes(String authType) {
+      this.authType = authType;
+    }
+
+    public String getAuthName() {
+      return authType;
+    }
+
+  }
+
+  private HadoopThriftAuthBridge.Server saslServer;
+  private String authTypeStr;
+  private final String transportMode;
+  private final HiveConf conf;
+
+  public static final String HS2_PROXY_USER = "hive.server2.proxy.user";
+  public static final String HS2_CLIENT_TOKEN = "hiveserver2ClientToken";
+
+  public HiveAuthFactory(HiveConf conf) throws TTransportException {
+    this.conf = conf;
+    transportMode = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
+    authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION);
+
+    // In http mode we use NOSASL as the default auth type
+    if ("http".equalsIgnoreCase(transportMode)) {
+      if (authTypeStr == null) {
+        authTypeStr = AuthTypes.NOSASL.getAuthName();
+      }
+    } else {
+      if (authTypeStr == null) {
+        authTypeStr = AuthTypes.NONE.getAuthName();
+      }
+      if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
+        saslServer = ShimLoader.getHadoopThriftAuthBridge()
+          .createServer(conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
+                        conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL));
+        // start delegation token manager
+        try {
+          // rawStore is only necessary for DBTokenStore
+          Object rawStore = null;
+          String tokenStoreClass =
+              conf.getVar(HiveConf.ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS);
+
+          if (tokenStoreClass.equals(DBTokenStore.class.getName())) {
+            HMSHandler baseHandler = new HiveMetaStore.HMSHandler(
+                "new db based metaserver", conf, true);
+            rawStore = baseHandler.getMS();
+          }
+
+          saslServer.startDelegationTokenSecretManager(conf, rawStore, ServerMode.HIVESERVER2);
+        } catch (MetaException | IOException e) {
+          throw new TTransportException("Failed to start token manager", e);
+        }
+      }
+    }
+  }
+
+  public Map<String, String> getSaslProperties() {
+    Map<String, String> saslProps = new HashMap<String, String>();
+    SaslQOP saslQOP = SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
+    saslProps.put(Sasl.QOP, saslQOP.toString());
+    saslProps.put(Sasl.SERVER_AUTH, "true");
+    return saslProps;
+  }
+
+  public TTransportFactory getAuthTransFactory() throws LoginException {
+    TTransportFactory transportFactory;
+    if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
+      try {
+        transportFactory = saslServer.createTransportFactory(getSaslProperties());
+      } catch (TTransportException e) {
+        throw new LoginException(e.getMessage());
+      }
+    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NONE.getAuthName())) {
+      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
+    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.LDAP.getAuthName())) {
+      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
+    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.PAM.getAuthName())) {
+      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
+    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NOSASL.getAuthName())) {
+      transportFactory = new TTransportFactory();
+    } else if (authTypeStr.equalsIgnoreCase(AuthTypes.CUSTOM.getAuthName())) {
+      transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr);
+    } else {
+      throw new LoginException("Unsupported authentication type " + authTypeStr);
+    }
+    return transportFactory;
+  }
+
+  /**
+   * Returns the Thrift processor factory for HiveServer2 running in binary mode.
+   *
+   * @param service the underlying Thrift CLI service
+   * @return a processor factory matching the configured authentication type
+   * @throws LoginException if the processor factory cannot be created
+   */
+  public TProcessorFactory getAuthProcFactory(ThriftCLIService service) throws LoginException {
+    if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
+      return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service);
+    } else {
+      return PlainSaslHelper.getPlainProcessorFactory(service);
+    }
+  }
+
+  public String getRemoteUser() {
+    return saslServer == null ? null : saslServer.getRemoteUser();
+  }
+
+  public String getIpAddress() {
+    if (saslServer == null || saslServer.getRemoteAddress() == null) {
+      return null;
+    } else {
+      return saslServer.getRemoteAddress().getHostAddress();
+    }
+  }
+
+  // Perform Kerberos login using the Hadoop shim API if the configuration is available.
+  public static void loginFromKeytab(HiveConf hiveConf) throws IOException {
+    String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
+    String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
+    if (principal.isEmpty() || keyTabFile.isEmpty()) {
+      throw new IOException("HiveServer2 Kerberos principal or keytab is not correctly configured");
+    } else {
+      UserGroupInformation.loginUserFromKeytab(
+          SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keyTabFile);
+    }
+  }
+
+  // Perform SPNEGO login using the Hadoop shim API if the configuration is available.
+  public static UserGroupInformation loginFromSpnegoKeytabAndReturnUGI(HiveConf hiveConf)
+    throws IOException {
+    String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL);
+    String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB);
+    if (principal.isEmpty() || keyTabFile.isEmpty()) {
+      throw new IOException("HiveServer2 SPNEGO principal or keytab is not correctly configured");
+    } else {
+      return UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+          SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keyTabFile);
+    }
+  }
+
+  public static TTransport getSocketTransport(String host, int port, int loginTimeout) {
+    return new TSocket(host, port, loginTimeout);
+  }
+
+  public static TTransport getSSLSocket(String host, int port, int loginTimeout)
+    throws TTransportException {
+    return TSSLTransportFactory.getClientSocket(host, port, loginTimeout);
+  }
+
+  public static TTransport getSSLSocket(String host, int port, int loginTimeout,
+    String trustStorePath, String trustStorePassWord) throws TTransportException {
+    TSSLTransportFactory.TSSLTransportParameters params =
+      new TSSLTransportFactory.TSSLTransportParameters();
+    params.setTrustStore(trustStorePath, trustStorePassWord);
+    params.requireClientAuth(true);
+    return TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params);
+  }
+
+  public static TServerSocket getServerSocket(String hiveHost, int portNum)
+    throws TTransportException {
+    InetSocketAddress serverAddress;
+    if (hiveHost == null || hiveHost.isEmpty()) {
+      // Wildcard bind
+      serverAddress = new InetSocketAddress(portNum);
+    } else {
+      serverAddress = new InetSocketAddress(hiveHost, portNum);
+    }
+    return new TServerSocket(serverAddress);
+  }
+
+  public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, String keyStorePath,
+      String keyStorePassWord, List<String> sslVersionBlacklist) throws TTransportException,
+      UnknownHostException {
+    TSSLTransportFactory.TSSLTransportParameters params =
+        new TSSLTransportFactory.TSSLTransportParameters();
+    params.setKeyStore(keyStorePath, keyStorePassWord);
+    InetSocketAddress serverAddress;
+    if (hiveHost == null || hiveHost.isEmpty()) {
+      // Wildcard bind
+      serverAddress = new InetSocketAddress(portNum);
+    } else {
+      serverAddress = new InetSocketAddress(hiveHost, portNum);
+    }
+    TServerSocket thriftServerSocket =
+        TSSLTransportFactory.getServerSocket(portNum, 0, serverAddress.getAddress(), params);
+    if (thriftServerSocket.getServerSocket() instanceof SSLServerSocket) {
+      List<String> sslVersionBlacklistLocal = new ArrayList<String>();
+      for (String sslVersion : sslVersionBlacklist) {
+        sslVersionBlacklistLocal.add(sslVersion.trim().toLowerCase());
+      }
+      SSLServerSocket sslServerSocket = (SSLServerSocket) thriftServerSocket.getServerSocket();
+      List<String> enabledProtocols = new ArrayList<String>();
+      for (String protocol : sslServerSocket.getEnabledProtocols()) {
+        if (sslVersionBlacklistLocal.contains(protocol.toLowerCase())) {
+          LOG.debug("Disabling SSL Protocol: " + protocol);
+        } else {
+          enabledProtocols.add(protocol);
+        }
+      }
+      sslServerSocket.setEnabledProtocols(enabledProtocols.toArray(new String[0]));
+      LOG.info("SSL Server Socket Enabled Protocols: "
+          + Arrays.toString(sslServerSocket.getEnabledProtocols()));
+    }
+    return thriftServerSocket;
+  }
+
+  // retrieve delegation token for the given user
+  public String getDelegationToken(String owner, String renewer) throws HiveSQLException {
+    if (saslServer == null) {
+      throw new HiveSQLException(
+          "Delegation token only supported over kerberos authentication", 
"08S01");
+    }
+
+    try {
+      String tokenStr = saslServer.getDelegationTokenWithService(owner, renewer, HS2_CLIENT_TOKEN);
+      if (tokenStr == null || tokenStr.isEmpty()) {
+        throw new HiveSQLException(
+            "Received an empty delegation token when retrieving a token for user " + owner,
+            "08S01");
+      }
+      return tokenStr;
+    } catch (IOException e) {
+      throw new HiveSQLException(
+          "Error retrieving delegation token for user " + owner, "08S01", e);
+    } catch (InterruptedException e) {
+      throw new HiveSQLException("delegation token retrieval interrupted", 
"08S01", e);
+    }
+  }
+
+  // cancel given delegation token
+  public void cancelDelegationToken(String delegationToken) throws HiveSQLException {
+    if (saslServer == null) {
+      throw new HiveSQLException(
+          "Delegation token only supported over kerberos authentication", 
"08S01");
+    }
+    try {
+      saslServer.cancelDelegationToken(delegationToken);
+    } catch (IOException e) {
+      throw new HiveSQLException(
+          "Error canceling delegation token " + delegationToken, "08S01", e);
+    }
+  }
+
+  public void renewDelegationToken(String delegationToken) throws HiveSQLException {
+    if (saslServer == null) {
+      throw new HiveSQLException(
+          "Delegation token only supported over kerberos authentication", 
"08S01");
+    }
+    try {
+      saslServer.renewDelegationToken(delegationToken);
+    } catch (IOException e) {
+      throw new HiveSQLException(
+          "Error renewing delegation token " + delegationToken, "08S01", e);
+    }
+  }
+
+  public String getUserFromToken(String delegationToken) throws HiveSQLException {
+    if (saslServer == null) {
+      throw new HiveSQLException(
+          "Delegation token only supported over kerberos authentication", 
"08S01");
+    }
+    try {
+      return saslServer.getUserFromToken(delegationToken);
+    } catch (IOException e) {
+      throw new HiveSQLException(
+          "Error extracting user from delegation token " + delegationToken, 
"08S01", e);
+    }
+  }
+
+  public static void verifyProxyAccess(String realUser, String proxyUser, String ipAddress,
+    HiveConf hiveConf) throws HiveSQLException {
+    try {
+      UserGroupInformation sessionUgi;
+      if (UserGroupInformation.isSecurityEnabled()) {
+        KerberosNameShim kerbName = ShimLoader.getHadoopShims().getKerberosNameShim(realUser);
+        sessionUgi = UserGroupInformation.createProxyUser(
+            kerbName.getServiceName(), UserGroupInformation.getLoginUser());
+      } else {
+        sessionUgi = UserGroupInformation.createRemoteUser(realUser);
+      }
+      if (!proxyUser.equalsIgnoreCase(realUser)) {
+        ProxyUsers.refreshSuperUserGroupsConfiguration(hiveConf);
+        ProxyUsers.authorize(UserGroupInformation.createProxyUser(proxyUser, sessionUgi),
+            ipAddress, hiveConf);
+      }
+    } catch (IOException e) {
+      throw new HiveSQLException(
+        "Failed to validate proxy privilege of " + realUser + " for " + 
proxyUser, "08S01", e);
+    }
+  }
+
+}
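
Putting the binary-mode pieces together: a hedged sketch of how a server might obtain its transport factory from this class. Building the matching TProcessorFactory additionally requires a live ThriftCLIService, so it is only noted in a comment:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.service.auth.HiveAuthFactory;
    import org.apache.thrift.transport.TTransportFactory;

    public class HiveAuthFactoryExample {
      public static void main(String[] args) throws Exception {
        // The constructor reads HIVE_SERVER2_TRANSPORT_MODE and
        // HIVE_SERVER2_AUTHENTICATION; for KERBEROS it also starts the
        // delegation token manager.
        HiveConf conf = new HiveConf();
        HiveAuthFactory authFactory = new HiveAuthFactory(conf);

        // SASL, plain, or raw transport factory, depending on the auth type.
        TTransportFactory transportFactory = authFactory.getAuthTransFactory();
        System.out.println("Transport factory: " + transportFactory.getClass().getName());

        // authFactory.getAuthProcFactory(thriftCLIService) would supply the
        // processor factory once a ThriftCLIService instance exists.
      }
    }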


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
