http://git-wip-us.apache.org/repos/asf/airavata/blob/0f781b2e/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/client/sdk/ValueConstraint.java
----------------------------------------------------------------------
diff --git 
a/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/client/sdk/ValueConstraint.java
 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/client/sdk/ValueConstraint.java
new file mode 100644
index 0000000..7918fec
--- /dev/null
+++ 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/client/sdk/ValueConstraint.java
@@ -0,0 +1,566 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.3)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.airavata.cloud.aurora.client.sdk;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
/**
 * A constraint that specifies an explicit set of values, at least one of which must be present
 * on a host for a task to be scheduled there.
 *
 * NOTE(review): Thrift-generated struct (compiler 0.9.3). Do not hand-edit logic;
 * regenerate from the .thrift IDL instead. Comments below are review annotations only.
 */
@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2016-10-21")
public class ValueConstraint implements org.apache.thrift.TBase<ValueConstraint, ValueConstraint._Fields>, java.io.Serializable, Cloneable, Comparable<ValueConstraint> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ValueConstraint");

  // Wire-level field descriptors; the short ids (1, 2) are the Thrift field ids and must
  // stay in sync with the IDL.
  private static final org.apache.thrift.protocol.TField NEGATED_FIELD_DESC = new org.apache.thrift.protocol.TField("negated", org.apache.thrift.protocol.TType.BOOL, (short)1);
  private static final org.apache.thrift.protocol.TField VALUES_FIELD_DESC = new org.apache.thrift.protocol.TField("values", org.apache.thrift.protocol.TType.SET, (short)2);

  // Maps each serialization scheme (standard vs. tuple) to its factory; consulted by read()/write().
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new ValueConstraintStandardSchemeFactory());
    schemes.put(TupleScheme.class, new ValueConstraintTupleSchemeFactory());
  }

  /**
   * If true, treat this as a 'not' - to avoid specific values.
   */
  public boolean negated; // required
  public Set<String> values; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    /**
     * If true, treat this as a 'not' - to avoid specific values.
     */
    NEGATED((short)1, "negated"),
    VALUES((short)2, "values");

    // Lookup table from field name to enum constant, built once at class-load time.
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // NEGATED
          return NEGATED;
        case 2: // VALUES
          return VALUES;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  // 'negated' is a primitive and cannot be null, so its "has been set" state is tracked
  // in this bitfield instead (bit 0).
  private static final int __NEGATED_ISSET_ID = 0;
  private byte __isset_bitfield = 0;
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.NEGATED, new org.apache.thrift.meta_data.FieldMetaData("negated", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
    tmpMap.put(_Fields.VALUES, new org.apache.thrift.meta_data.FieldMetaData("values", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.SetMetaData(org.apache.thrift.protocol.TType.SET,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ValueConstraint.class, metaDataMap);
  }

  public ValueConstraint() {
  }

  public ValueConstraint(
    boolean negated,
    Set<String> values)
  {
    this();
    this.negated = negated;
    setNegatedIsSet(true);
    this.values = values;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public ValueConstraint(ValueConstraint other) {
    __isset_bitfield = other.__isset_bitfield;
    this.negated = other.negated;
    if (other.isSetValues()) {
      // String elements are immutable, so copying the Set is a sufficiently deep copy.
      Set<String> __this__values = new HashSet<String>(other.values);
      this.values = __this__values;
    }
  }

  public ValueConstraint deepCopy() {
    return new ValueConstraint(this);
  }

  // Resets all fields to their uninitialized state (primitive -> default, object -> null).
  @Override
  public void clear() {
    setNegatedIsSet(false);
    this.negated = false;
    this.values = null;
  }

  /**
   * If true, treat this as a 'not' - to avoid specific values.
   */
  public boolean isNegated() {
    return this.negated;
  }

  /**
   * If true, treat this as a 'not' - to avoid specific values.
   */
  public ValueConstraint setNegated(boolean negated) {
    this.negated = negated;
    setNegatedIsSet(true);
    return this;
  }

  public void unsetNegated() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __NEGATED_ISSET_ID);
  }

  /** Returns true if field negated is set (has been assigned a value) and false otherwise */
  public boolean isSetNegated() {
    return EncodingUtils.testBit(__isset_bitfield, __NEGATED_ISSET_ID);
  }

  public void setNegatedIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __NEGATED_ISSET_ID, value);
  }

  public int getValuesSize() {
    return (this.values == null) ? 0 : this.values.size();
  }

  // NOTE(review): returns null (not an empty iterator) when 'values' is unset — callers
  // must null-check. Generated contract; kept as-is.
  public java.util.Iterator<String> getValuesIterator() {
    return (this.values == null) ? null : this.values.iterator();
  }

  // Lazily creates the backing set on first add.
  public void addToValues(String elem) {
    if (this.values == null) {
      this.values = new HashSet<String>();
    }
    this.values.add(elem);
  }

  public Set<String> getValues() {
    return this.values;
  }

  public ValueConstraint setValues(Set<String> values) {
    this.values = values;
    return this;
  }

  public void unsetValues() {
    this.values = null;
  }

  /** Returns true if field values is set (has been assigned a value) and false otherwise */
  public boolean isSetValues() {
    return this.values != null;
  }

  // For reference-typed fields "set" means non-null, so only the false case does anything.
  public void setValuesIsSet(boolean value) {
    if (!value) {
      this.values = null;
    }
  }

  // Reflective setter used by generic Thrift tooling; null clears the field.
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case NEGATED:
      if (value == null) {
        unsetNegated();
      } else {
        setNegated((Boolean)value);
      }
      break;

    case VALUES:
      if (value == null) {
        unsetValues();
      } else {
        setValues((Set<String>)value);
      }
      break;

    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case NEGATED:
      return isNegated();

    case VALUES:
      return getValues();

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case NEGATED:
      return isSetNegated();
    case VALUES:
      return isSetValues();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof ValueConstraint)
      return this.equals((ValueConstraint)that);
    return false;
  }

  public boolean equals(ValueConstraint that) {
    if (that == null)
      return false;

    // 'negated' is a required primitive, so both presence flags are constant-true and
    // the comparison always runs. Generated pattern; kept as-is.
    boolean this_present_negated = true;
    boolean that_present_negated = true;
    if (this_present_negated || that_present_negated) {
      if (!(this_present_negated && that_present_negated))
        return false;
      if (this.negated != that.negated)
        return false;
    }

    boolean this_present_values = true && this.isSetValues();
    boolean that_present_values = true && that.isSetValues();
    if (this_present_values || that_present_values) {
      if (!(this_present_values && that_present_values))
        return false;
      if (!this.values.equals(that.values))
        return false;
    }

    return true;
  }

  // Hash is derived from (presence flag, value) pairs so it stays consistent with equals().
  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();

    boolean present_negated = true;
    list.add(present_negated);
    if (present_negated)
      list.add(negated);

    boolean present_values = true && (isSetValues());
    list.add(present_values);
    if (present_values)
      list.add(values);

    return list.hashCode();
  }

  // Orders by isSet flag first, then field value, field by field in id order.
  @Override
  public int compareTo(ValueConstraint other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = Boolean.valueOf(isSetNegated()).compareTo(other.isSetNegated());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetNegated()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.negated, other.negated);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetValues()).compareTo(other.isSetValues());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetValues()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.values, other.values);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  // Deserialization entry point: dispatches to the scheme matching the protocol.
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  // Serialization entry point: dispatches to the scheme matching the protocol.
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("ValueConstraint(");
    // NOTE(review): the generated 'first' bookkeeping is degenerate here (always false
    // before the check), so the separator is always emitted; harmless, kept as-is.
    boolean first = true;

    sb.append("negated:");
    sb.append(this.negated);
    first = false;
    if (!first) sb.append(", ");
    sb.append("values:");
    if (this.values == null) {
      sb.append("null");
    } else {
      sb.append(this.values);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }

  // No required non-primitive fields to verify, so this is intentionally empty.
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }

  // Java serialization delegates to Thrift's compact protocol instead of default field
  // serialization.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class ValueConstraintStandardSchemeFactory implements SchemeFactory {
    public ValueConstraintStandardScheme getScheme() {
      return new ValueConstraintStandardScheme();
    }
  }

  // Standard (field-id tagged) binary encoding; unknown/mistyped fields are skipped for
  // forward compatibility.
  private static class ValueConstraintStandardScheme extends StandardScheme<ValueConstraint> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, ValueConstraint struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // NEGATED
            if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
              struct.negated = iprot.readBool();
              struct.setNegatedIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // VALUES
            if (schemeField.type == org.apache.thrift.protocol.TType.SET) {
              {
                org.apache.thrift.protocol.TSet _set16 = iprot.readSetBegin();
                // Presized to 2x the declared element count to limit HashSet rehashing.
                struct.values = new HashSet<String>(2*_set16.size);
                String _elem17;
                for (int _i18 = 0; _i18 < _set16.size; ++_i18)
                {
                  _elem17 = iprot.readString();
                  struct.values.add(_elem17);
                }
                iprot.readSetEnd();
              }
              struct.setValuesIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();

      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, ValueConstraint struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      // 'negated' is a primitive: always written. 'values' is only written when non-null.
      oprot.writeFieldBegin(NEGATED_FIELD_DESC);
      oprot.writeBool(struct.negated);
      oprot.writeFieldEnd();
      if (struct.values != null) {
        oprot.writeFieldBegin(VALUES_FIELD_DESC);
        {
          oprot.writeSetBegin(new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.STRING, struct.values.size()));
          for (String _iter19 : struct.values)
          {
            oprot.writeString(_iter19);
          }
          oprot.writeSetEnd();
        }
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class ValueConstraintTupleSchemeFactory implements SchemeFactory {
    public ValueConstraintTupleScheme getScheme() {
      return new ValueConstraintTupleScheme();
    }
  }

  // Compact tuple encoding: a leading BitSet records which fields follow, then the set
  // fields are written in id order with no per-field tags.
  private static class ValueConstraintTupleScheme extends TupleScheme<ValueConstraint> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, ValueConstraint struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetNegated()) {
        optionals.set(0);
      }
      if (struct.isSetValues()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.isSetNegated()) {
        oprot.writeBool(struct.negated);
      }
      if (struct.isSetValues()) {
        {
          oprot.writeI32(struct.values.size());
          for (String _iter20 : struct.values)
          {
            oprot.writeString(_iter20);
          }
        }
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, ValueConstraint struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(2);
      if (incoming.get(0)) {
        struct.negated = iprot.readBool();
        struct.setNegatedIsSet(true);
      }
      if (incoming.get(1)) {
        {
          org.apache.thrift.protocol.TSet _set21 = new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
          struct.values = new HashSet<String>(2*_set21.size);
          String _elem22;
          for (int _i23 = 0; _i23 < _set21.size; ++_i23)
          {
            _elem22 = iprot.readString();
            struct.values.add(_elem22);
          }
        }
        struct.setValuesIsSet(true);
      }
    }
  }

}
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/0f781b2e/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/client/sdk/Volume.java
----------------------------------------------------------------------
diff --git 
a/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/client/sdk/Volume.java
 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/client/sdk/Volume.java
new file mode 100644
index 0000000..4286404
--- /dev/null
+++ 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/client/sdk/Volume.java
@@ -0,0 +1,655 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.3)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.airavata.cloud.aurora.client.sdk;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+/**
+ * A volume mount point within a container
+ */
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = 
"2016-10-21")
+public class Volume implements org.apache.thrift.TBase<Volume, 
Volume._Fields>, java.io.Serializable, Cloneable, Comparable<Volume> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Volume");

  // Wire-level field descriptors; the short ids (1, 2, 3) are the Thrift field ids and
  // must stay in sync with the IDL. The enum 'mode' travels as an I32 on the wire.
  private static final org.apache.thrift.protocol.TField CONTAINER_PATH_FIELD_DESC = new org.apache.thrift.protocol.TField("containerPath", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField HOST_PATH_FIELD_DESC = new org.apache.thrift.protocol.TField("hostPath", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField MODE_FIELD_DESC = new org.apache.thrift.protocol.TField("mode", org.apache.thrift.protocol.TType.I32, (short)3);

  // Maps each serialization scheme (standard vs. tuple) to its factory; consulted by read()/write().
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new VolumeStandardSchemeFactory());
    schemes.put(TupleScheme.class, new VolumeTupleSchemeFactory());
  }
+
  /**
   * The path inside the container where the mount will be created.
   */
  public String containerPath; // required
  /**
   * The path on the host that will serve as the source for the mount.
   */
  public String hostPath; // required
  /**
   * The access mode
   * 
   * @see Mode
   */
  public Mode mode; // required
+
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    /**
     * The path inside the container where the mount will be created.
     */
    CONTAINER_PATH((short)1, "containerPath"),
    /**
     * The path on the host that will serve as the source for the mount.
     */
    HOST_PATH((short)2, "hostPath"),
    /**
     * The access mode
     * 
     * @see Mode
     */
    MODE((short)3, "mode");

    // Lookup table from field name to enum constant, built once at class-load time.
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // CONTAINER_PATH
          return CONTAINER_PATH;
        case 2: // HOST_PATH
          return HOST_PATH;
        case 3: // MODE
          return MODE;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }
+
  // isset id assignments
  // (none needed: all three fields are reference types, so null alone marks "unset")
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    // Per-field metadata registered with Thrift's global struct registry for reflection.
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.CONTAINER_PATH, new org.apache.thrift.meta_data.FieldMetaData("containerPath", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.HOST_PATH, new org.apache.thrift.meta_data.FieldMetaData("hostPath", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.MODE, new org.apache.thrift.meta_data.FieldMetaData("mode", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, Mode.class)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Volume.class, metaDataMap);
  }
+
  // Default constructor; all fields start unset (null).
  public Volume() {
  }
+
  // All-fields constructor; arguments are stored as-is (no copying or validation).
  public Volume(
    String containerPath,
    String hostPath,
    Mode mode)
  {
    this();
    this.containerPath = containerPath;
    this.hostPath = hostPath;
    this.mode = mode;
  }
+
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public Volume(Volume other) {
    // Strings and the Mode enum are immutable, so reference copies are sufficiently deep.
    if (other.isSetContainerPath()) {
      this.containerPath = other.containerPath;
    }
    if (other.isSetHostPath()) {
      this.hostPath = other.hostPath;
    }
    if (other.isSetMode()) {
      this.mode = other.mode;
    }
  }
+
  /** Returns a deep copy of this struct (delegates to the copy constructor). */
  public Volume deepCopy() {
    return new Volume(this);
  }
+
  // Resets all fields to their unset (null) state.
  @Override
  public void clear() {
    this.containerPath = null;
    this.hostPath = null;
    this.mode = null;
  }
+
  /**
   * The path inside the container where the mount will be created.
   */
  public String getContainerPath() {
    return this.containerPath;
  }

  /**
   * The path inside the container where the mount will be created.
   *
   * @return this, for call chaining
   */
  public Volume setContainerPath(String containerPath) {
    this.containerPath = containerPath;
    return this;
  }

  public void unsetContainerPath() {
    this.containerPath = null;
  }

  /** Returns true if field containerPath is set (has been assigned a value) and false otherwise */
  public boolean isSetContainerPath() {
    return this.containerPath != null;
  }

  // For reference-typed fields "set" means non-null, so only the false case does anything.
  public void setContainerPathIsSet(boolean value) {
    if (!value) {
      this.containerPath = null;
    }
  }
+
  /**
   * The path on the host that will serve as the source for the mount.
   */
  public String getHostPath() {
    return this.hostPath;
  }

  /**
   * The path on the host that will serve as the source for the mount.
   *
   * @return this, for call chaining
   */
  public Volume setHostPath(String hostPath) {
    this.hostPath = hostPath;
    return this;
  }

  public void unsetHostPath() {
    this.hostPath = null;
  }

  /** Returns true if field hostPath is set (has been assigned a value) and false otherwise */
  public boolean isSetHostPath() {
    return this.hostPath != null;
  }

  // For reference-typed fields "set" means non-null, so only the false case does anything.
  public void setHostPathIsSet(boolean value) {
    if (!value) {
      this.hostPath = null;
    }
  }
+
  /**
   * The access mode
   * 
   * @see Mode
   */
  public Mode getMode() {
    return this.mode;
  }

  /**
   * The access mode
   * 
   * @see Mode
   * @return this, for call chaining
   */
  public Volume setMode(Mode mode) {
    this.mode = mode;
    return this;
  }

  public void unsetMode() {
    this.mode = null;
  }

  /** Returns true if field mode is set (has been assigned a value) and false otherwise */
  public boolean isSetMode() {
    return this.mode != null;
  }

  // For reference-typed fields "set" means non-null, so only the false case does anything.
  public void setModeIsSet(boolean value) {
    if (!value) {
      this.mode = null;
    }
  }
+
  // Reflective setter used by generic Thrift tooling; null clears the field.
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case CONTAINER_PATH:
      if (value == null) {
        unsetContainerPath();
      } else {
        setContainerPath((String)value);
      }
      break;

    case HOST_PATH:
      if (value == null) {
        unsetHostPath();
      } else {
        setHostPath((String)value);
      }
      break;

    case MODE:
      if (value == null) {
        unsetMode();
      } else {
        setMode((Mode)value);
      }
      break;

    }
  }
+
  // Reflective getter used by generic Thrift tooling; throws if the enum value is
  // unrecognized (should be unreachable for a valid _Fields constant).
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case CONTAINER_PATH:
      return getContainerPath();

    case HOST_PATH:
      return getHostPath();

    case MODE:
      return getMode();

    }
    throw new IllegalStateException();
  }
+
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case CONTAINER_PATH:
      return isSetContainerPath();
    case HOST_PATH:
      return isSetHostPath();
    case MODE:
      return isSetMode();
    }
    throw new IllegalStateException();
  }
+
  // Type-checks then delegates to the strongly-typed overload below.
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof Volume)
      return this.equals((Volume)that);
    return false;
  }
+
  // Field-by-field equality: two structs are equal when each field is either unset in
  // both or set to equal values in both.
  public boolean equals(Volume that) {
    if (that == null)
      return false;

    boolean this_present_containerPath = true && this.isSetContainerPath();
    boolean that_present_containerPath = true && that.isSetContainerPath();
    if (this_present_containerPath || that_present_containerPath) {
      if (!(this_present_containerPath && that_present_containerPath))
        return false;
      if (!this.containerPath.equals(that.containerPath))
        return false;
    }

    boolean this_present_hostPath = true && this.isSetHostPath();
    boolean that_present_hostPath = true && that.isSetHostPath();
    if (this_present_hostPath || that_present_hostPath) {
      if (!(this_present_hostPath && that_present_hostPath))
        return false;
      if (!this.hostPath.equals(that.hostPath))
        return false;
    }

    boolean this_present_mode = true && this.isSetMode();
    boolean that_present_mode = true && that.isSetMode();
    if (this_present_mode || that_present_mode) {
      if (!(this_present_mode && that_present_mode))
        return false;
      if (!this.mode.equals(that.mode))
        return false;
    }

    return true;
  }
+
  // Hash is derived from (presence flag, value) pairs so it stays consistent with
  // equals(). The enum contributes its numeric wire value, not its identity hash.
  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();

    boolean present_containerPath = true && (isSetContainerPath());
    list.add(present_containerPath);
    if (present_containerPath)
      list.add(containerPath);

    boolean present_hostPath = true && (isSetHostPath());
    list.add(present_hostPath);
    if (present_hostPath)
      list.add(hostPath);

    boolean present_mode = true && (isSetMode());
    list.add(present_mode);
    if (present_mode)
      list.add(mode.getValue());

    return list.hashCode();
  }
+
  // Orders by isSet flag first, then field value, field by field in id order.
  @Override
  public int compareTo(Volume other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = Boolean.valueOf(isSetContainerPath()).compareTo(other.isSetContainerPath());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetContainerPath()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.containerPath, other.containerPath);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetHostPath()).compareTo(other.isSetHostPath());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetHostPath()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.hostPath, other.hostPath);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetMode()).compareTo(other.isSetMode());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetMode()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.mode, other.mode);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
+
  /** Resolves a thrift field id to its {@code _Fields} constant (null if unknown, per findByThriftId). */
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
+
  /** Deserializes this struct from {@code iprot}, dispatching to the scheme registered for the protocol type. */
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
+
  /** Serializes this struct to {@code oprot}, dispatching to the scheme registered for the protocol type. */
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
+
  /**
   * Renders "Volume(containerPath:..., hostPath:..., mode:...)"; null fields
   * print as "null". Note the generated {@code first} flag is vestigial: it is
   * forced to false after the first field, so the ", " separator always fires
   * for subsequent fields.
   */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("Volume(");
    boolean first = true;

    sb.append("containerPath:");
    if (this.containerPath == null) {
      sb.append("null");
    } else {
      sb.append(this.containerPath);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("hostPath:");
    if (this.hostPath == null) {
      sb.append("null");
    } else {
      sb.append(this.hostPath);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("mode:");
    if (this.mode == null) {
      sb.append("null");
    } else {
      sb.append(this.mode);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
+
  /**
   * Validates struct invariants before write / after read. The generator
   * emitted no checks for Volume (no required primitive fields visible here),
   * so this is currently a no-op.
   */
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
+
  /**
   * Java-serialization hook: encodes the struct with thrift's compact protocol
   * so its Java-serialized form is the thrift wire format. TException is
   * rethrown wrapped in IOException as required by the serialization contract.
   */
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
+
  /**
   * Java-deserialization hook: mirror of writeObject(); decodes the struct
   * from thrift compact-protocol bytes, wrapping TException in IOException.
   */
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
+
  /** Factory registered with the scheme map; supplies the standard-protocol scheme for Volume. */
  private static class VolumeStandardSchemeFactory implements SchemeFactory {
    public VolumeStandardScheme getScheme() {
      return new VolumeStandardScheme();
    }
  }
+
  /**
   * Field-id based (de)serialization of Volume for standard protocols.
   * Reading tolerates unknown or wrongly-typed fields by skipping them;
   * writing emits only non-null fields.
   */
  private static class VolumeStandardScheme extends StandardScheme<Volume> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, Volume struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      // Loop over fields until the STOP marker; dispatch on field id.
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
          break;
        }
        switch (schemeField.id) {
          case 1: // CONTAINER_PATH
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.containerPath = iprot.readString();
              struct.setContainerPathIsSet(true);
            } else { 
              // type mismatch: skip the value to stay aligned with the stream
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // HOST_PATH
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.hostPath = iprot.readString();
              struct.setHostPathIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3: // MODE
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              // Enum transported as i32; findByValue returns null for unknown values.
              struct.mode = org.apache.airavata.cloud.aurora.client.sdk.Mode.findByValue(iprot.readI32());
              struct.setModeIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            // unknown field id from a newer schema: skip it
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();

      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, Volume struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.containerPath != null) {
        oprot.writeFieldBegin(CONTAINER_PATH_FIELD_DESC);
        oprot.writeString(struct.containerPath);
        oprot.writeFieldEnd();
      }
      if (struct.hostPath != null) {
        oprot.writeFieldBegin(HOST_PATH_FIELD_DESC);
        oprot.writeString(struct.hostPath);
        oprot.writeFieldEnd();
      }
      if (struct.mode != null) {
        oprot.writeFieldBegin(MODE_FIELD_DESC);
        oprot.writeI32(struct.mode.getValue());
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }
+
  /** Factory registered with the scheme map; supplies the tuple-protocol scheme for Volume. */
  private static class VolumeTupleSchemeFactory implements SchemeFactory {
    public VolumeTupleScheme getScheme() {
      return new VolumeTupleScheme();
    }
  }
+
  /**
   * Compact tuple encoding for Volume: a 3-bit presence bitset (one bit per
   * field, in declaration order) followed by only the set field values.
   * Reader and writer must agree on the bit positions.
   */
  private static class VolumeTupleScheme extends TupleScheme<Volume> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, Volume struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetContainerPath()) {
        optionals.set(0);
      }
      if (struct.isSetHostPath()) {
        optionals.set(1);
      }
      if (struct.isSetMode()) {
        optionals.set(2);
      }
      oprot.writeBitSet(optionals, 3);
      if (struct.isSetContainerPath()) {
        oprot.writeString(struct.containerPath);
      }
      if (struct.isSetHostPath()) {
        oprot.writeString(struct.hostPath);
      }
      if (struct.isSetMode()) {
        // enum written as its i32 thrift value
        oprot.writeI32(struct.mode.getValue());
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, Volume struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(3);
      if (incoming.get(0)) {
        struct.containerPath = iprot.readString();
        struct.setContainerPathIsSet(true);
      }
      if (incoming.get(1)) {
        struct.hostPath = iprot.readString();
        struct.setHostPathIsSet(true);
      }
      if (incoming.get(2)) {
        // findByValue returns null for values this build does not know
        struct.mode = org.apache.airavata.cloud.aurora.client.sdk.Mode.findByValue(iprot.readI32());
        struct.setModeIsSet(true);
      }
    }
  }
+
+}
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/0f781b2e/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/client/sdk/aurora_apiConstants.java
----------------------------------------------------------------------
diff --git 
a/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/client/sdk/aurora_apiConstants.java
 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/client/sdk/aurora_apiConstants.java
new file mode 100644
index 0000000..cec1306
--- /dev/null
+++ 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/client/sdk/aurora_apiConstants.java
@@ -0,0 +1,105 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.3)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.airavata.cloud.aurora.client.sdk;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
/**
 * Constants generated from Aurora's api.thrift.
 *
 * NOTE(review): the Set constants below are public, static, and mutable
 * (plain HashSet) — callers could modify them. This is how the Thrift 0.9.3
 * generator emits const sets; do not hand-edit a generated file.
 */
public class aurora_apiConstants {

  public static final String AURORA_EXECUTOR_NAME = "AuroraExecutor";

  // ScheduleStatus values populated in the static initializer below
  // (presumably the states in which a task is considered active — TODO confirm against api.thrift).
  public static final Set<ScheduleStatus> ACTIVE_STATES = new HashSet<ScheduleStatus>();
  static {
    ACTIVE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.ASSIGNED);
    ACTIVE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.DRAINING);
    ACTIVE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.KILLING);
    ACTIVE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.PENDING);
    ACTIVE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.PREEMPTING);
    ACTIVE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.RESTARTING);
    ACTIVE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.RUNNING);
    ACTIVE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.STARTING);
  }

  // ACTIVE_STATES minus PENDING/THROTTLED — statuses where a slave has been assigned.
  public static final Set<ScheduleStatus> SLAVE_ASSIGNED_STATES = new HashSet<ScheduleStatus>();
  static {
    SLAVE_ASSIGNED_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.ASSIGNED);
    SLAVE_ASSIGNED_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.DRAINING);
    SLAVE_ASSIGNED_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.KILLING);
    SLAVE_ASSIGNED_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.PREEMPTING);
    SLAVE_ASSIGNED_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.RESTARTING);
    SLAVE_ASSIGNED_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.RUNNING);
    SLAVE_ASSIGNED_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.STARTING);
  }

  public static final Set<ScheduleStatus> LIVE_STATES = new HashSet<ScheduleStatus>();
  static {
    LIVE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.KILLING);
    LIVE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.PREEMPTING);
    LIVE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.RESTARTING);
    LIVE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.DRAINING);
    LIVE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.RUNNING);
  }

  // Final statuses: a task in one of these will not transition further.
  public static final Set<ScheduleStatus> TERMINAL_STATES = new HashSet<ScheduleStatus>();
  static {
    TERMINAL_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.FAILED);
    TERMINAL_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.FINISHED);
    TERMINAL_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.KILLED);
    TERMINAL_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.ScheduleStatus.LOST);
  }

  // Same regex emitted for three targets (Java, JVM, Python): word chars, '-' and '.'.
  public static final String GOOD_IDENTIFIER_PATTERN = "^[\\w\\-\\.]+$";

  public static final String GOOD_IDENTIFIER_PATTERN_JVM = "^[\\w\\-\\.]+$";

  public static final String GOOD_IDENTIFIER_PATTERN_PYTHON = "^[\\w\\-\\.]+$";

  /**
   * States the job update can be in while still considered active.
   */
  public static final Set<JobUpdateStatus> ACTIVE_JOB_UPDATE_STATES = new HashSet<JobUpdateStatus>();
  static {
    ACTIVE_JOB_UPDATE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.JobUpdateStatus.ROLLING_FORWARD);
    ACTIVE_JOB_UPDATE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.JobUpdateStatus.ROLLING_BACK);
    ACTIVE_JOB_UPDATE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.JobUpdateStatus.ROLL_FORWARD_PAUSED);
    ACTIVE_JOB_UPDATE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.JobUpdateStatus.ROLL_BACK_PAUSED);
    ACTIVE_JOB_UPDATE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.JobUpdateStatus.ROLL_FORWARD_AWAITING_PULSE);
    ACTIVE_JOB_UPDATE_STATES.add(org.apache.airavata.cloud.aurora.client.sdk.JobUpdateStatus.ROLL_BACK_AWAITING_PULSE);
  }

  // HTTP header name used to bypass scheduler leader redirection.
  public static final String BYPASS_LEADER_REDIRECT_HEADER_NAME = "Bypass-Leader-Redirect";

  public static final String TASK_FILESYSTEM_MOUNT_POINT = "taskfs";

}

http://git-wip-us.apache.org/repos/asf/airavata/blob/0f781b2e/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/sample/AuroraClientSample.java
----------------------------------------------------------------------
diff --git 
a/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/sample/AuroraClientSample.java
 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/sample/AuroraClientSample.java
new file mode 100644
index 0000000..19455f7
--- /dev/null
+++ 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/sample/AuroraClientSample.java
@@ -0,0 +1,138 @@
+package org.apache.airavata.cloud.aurora.sample;
+
+import java.text.MessageFormat;
+import java.util.HashSet;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.airavata.cloud.aurora.client.AuroraSchedulerClientFactory;
+import org.apache.airavata.cloud.aurora.client.AuroraThriftClient;
+import org.apache.airavata.cloud.aurora.client.bean.IdentityBean;
+import org.apache.airavata.cloud.aurora.client.bean.JobConfigBean;
+import org.apache.airavata.cloud.aurora.client.bean.JobKeyBean;
+import org.apache.airavata.cloud.aurora.client.bean.ProcessBean;
+import org.apache.airavata.cloud.aurora.client.bean.ResourceBean;
+import org.apache.airavata.cloud.aurora.client.bean.ResponseBean;
+import org.apache.airavata.cloud.aurora.client.bean.TaskConfigBean;
+import org.apache.airavata.cloud.aurora.client.sdk.ExecutorConfig;
+import org.apache.airavata.cloud.aurora.client.sdk.GetJobsResult;
+import org.apache.airavata.cloud.aurora.client.sdk.Identity;
+import org.apache.airavata.cloud.aurora.client.sdk.JobConfiguration;
+import org.apache.airavata.cloud.aurora.client.sdk.JobKey;
+import org.apache.airavata.cloud.aurora.client.sdk.ReadOnlyScheduler;
+import org.apache.airavata.cloud.aurora.client.sdk.Response;
+import org.apache.airavata.cloud.aurora.client.sdk.TaskConfig;
+import org.apache.airavata.cloud.aurora.util.AuroraThriftClientUtil;
+import org.apache.airavata.cloud.aurora.util.Constants;
+import org.apache.thrift.TException;
+
+/**
+ * The Class AuroraClientSample.
+ */
+public class AuroraClientSample {
+       
+       /** The aurora scheduler client. */
+       private static ReadOnlyScheduler.Client auroraSchedulerClient;
+       
+       /** The properties. */
+       private static Properties properties = new Properties();
+       
+       /**
+        * Gets the job summary.
+        *
+        * @param client the client
+        * @return the job summary
+        */
+       public static void getJobSummary(ReadOnlyScheduler.Client client) {
+               try {
+                       Response response = client.getJobs("centos");
+                       System.out.println("Response status: " + 
response.getResponseCode().name());
+                       if(response.getResult().isSetGetJobsResult()) {
+                               GetJobsResult result = 
response.getResult().getGetJobsResult();
+                               System.out.println(result);
+                               Set<JobConfiguration> jobConfigs = 
result.getConfigs();
+                               for(JobConfiguration jobConfig : jobConfigs) {
+                                       System.out.println(jobConfig);
+                                       JobKey jobKey = jobConfig.getKey();
+                                       Identity owner = jobConfig.getOwner();
+                                       TaskConfig taskConfig = 
jobConfig.getTaskConfig();
+                                       ExecutorConfig exeConfig = 
taskConfig.getExecutorConfig();
+                                       
+                                       System.out.println("\n**** JOB CONFIG 
****");
+                                               System.out.println("\t # 
instanceCount: " + jobConfig.getInstanceCount());
+                                               
+                                               System.out.println("\t >> Job 
Key <<");
+                                                       
System.out.println("\t\t # name: " + jobKey.getName());
+                                                       
System.out.println("\t\t # role: " + jobKey.getRole());
+                                                       
System.out.println("\t\t # environment: " + jobKey.getEnvironment());
+                                                       
+                                               System.out.println("\t >> 
Identity <<");
+                                                       
System.out.println("\t\t # owner: " + owner.getUser());
+                                                       
+                                               System.out.println("\t >> Task 
Config <<");
+                                                       
System.out.println("\t\t # numCPUs: " + taskConfig.getNumCpus());
+                                                       
System.out.println("\t\t # diskMb: " + taskConfig.getDiskMb());
+                                                       
System.out.println("\t\t # ramMb: " + taskConfig.getRamMb());
+                                                       
System.out.println("\t\t # priority: " + taskConfig.getPriority());
+                                                       
+                                                       
+                                               System.out.println("\t >> 
Executor Config <<");
+                                                       
System.out.println("\t\t # name: " + exeConfig.getName());
+                                                       
System.out.println("\t\t # data: " + exeConfig.getData());
+                               }
+                               
+                       }
+               } catch (TException e) {
+                       e.printStackTrace();
+               }
+       }
+       
+       public static void createJob() throws Exception {
+               JobKeyBean jobKey = new JobKeyBean("devel", "centos", 
"test_job");
+               IdentityBean owner = new IdentityBean("centos");
+               
+               ProcessBean proc1 = new ProcessBean("process_1", "echo 
'hello_world_1'", false);
+               ProcessBean proc2 = new ProcessBean("process_2", "echo 
'hello_world_2'", false);
+               Set<ProcessBean> processes = new HashSet<>();
+               processes.add(proc1);
+               processes.add(proc2);
+               
+               ResourceBean resources = new ResourceBean(0.1, 8, 1);
+               
+               TaskConfigBean taskConfig = new 
TaskConfigBean("task_hello_world", processes, resources);
+               JobConfigBean jobConfig = new JobConfigBean(jobKey, owner, 
taskConfig, "example");
+               
+               String executorConfigJson = 
AuroraThriftClientUtil.getExecutorConfigJson(jobConfig);
+               System.out.println(executorConfigJson);
+               
+               AuroraThriftClient client = 
AuroraThriftClient.getAuroraThriftClient(Constants.AURORA_SCHEDULER_PROP_FILE);
+               ResponseBean response = client.createJob(jobConfig);
+               System.out.println(response);
+       }
+       
+       /**
+        * The main method.
+        *
+        * @param args the arguments
+        */
+       public static void main(String[] args) {
+                try {
+                       
properties.load(AuroraClientSample.class.getClassLoader().getResourceAsStream(Constants.AURORA_SCHEDULER_PROP_FILE));
+                       String auroraHost = 
properties.getProperty(Constants.AURORA_SCHEDULER_HOST);
+                       String auroraPort = 
properties.getProperty(Constants.AURORA_SCHEDULER_PORT);
+                       auroraSchedulerClient = 
AuroraSchedulerClientFactory.createReadOnlySchedulerClient(MessageFormat.format(Constants.AURORA_SCHEDULER_CONNECTION_URL,
 auroraHost, auroraPort));
+                       
+                       // get jobs summary
+//                     AuroraClientSample.getJobSummary(auroraSchedulerClient);
+                       
+                       // create sample job
+//                     AuroraClientSample.createJob();
+                       AuroraThriftClient client = 
AuroraThriftClient.getAuroraThriftClient(Constants.AURORA_SCHEDULER_PROP_FILE);
+                       ResponseBean response = 
client.getPendingReasonForJob(new JobKeyBean("devel", "centos", 
"hello_pending"));
+                       System.out.println(response);
+               } catch (Exception ex) {
+                       ex.printStackTrace();
+               } 
+       }
+
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/0f781b2e/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/AuroraThriftClientUtil.java
----------------------------------------------------------------------
diff --git 
a/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/AuroraThriftClientUtil.java
 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/AuroraThriftClientUtil.java
new file mode 100644
index 0000000..c64576b
--- /dev/null
+++ 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/AuroraThriftClientUtil.java
@@ -0,0 +1,364 @@
+package org.apache.airavata.cloud.aurora.util;
+
+import java.nio.charset.Charset;
+import java.util.HashSet;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.airavata.cloud.aurora.client.bean.IdentityBean;
+import org.apache.airavata.cloud.aurora.client.bean.JobConfigBean;
+import org.apache.airavata.cloud.aurora.client.bean.JobDetailsResponseBean;
+import org.apache.airavata.cloud.aurora.client.bean.JobKeyBean;
+import org.apache.airavata.cloud.aurora.client.bean.PendingJobReasonBean;
+import org.apache.airavata.cloud.aurora.client.bean.ProcessBean;
+import org.apache.airavata.cloud.aurora.client.bean.ResourceBean;
+import org.apache.airavata.cloud.aurora.client.bean.ResponseBean;
+import org.apache.airavata.cloud.aurora.client.bean.ServerInfoBean;
+import org.apache.airavata.cloud.aurora.client.sdk.ExecutorConfig;
+import org.apache.airavata.cloud.aurora.client.sdk.Identity;
+import org.apache.airavata.cloud.aurora.client.sdk.JobConfiguration;
+import org.apache.airavata.cloud.aurora.client.sdk.JobKey;
+import org.apache.airavata.cloud.aurora.client.sdk.Resource;
+import org.apache.airavata.cloud.aurora.client.sdk.Response;
+import org.apache.airavata.cloud.aurora.client.sdk.TaskConfig;
+import org.apache.airavata.cloud.aurora.sample.AuroraClientSample;
+import org.apache.commons.io.IOUtils;
+import org.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * The Class AuroraThriftClientUtil.
+ */
+public class AuroraThriftClientUtil {
+
+       /** The Constant logger. */
+       private final static Logger logger = 
LoggerFactory.getLogger(AuroraThriftClientUtil.class);
+       
+       /** The properties. */
+       private static Properties properties = new Properties();
+       
+       /**
+        * Gets the executor config json.
+        *
+        * @param jobConfig the job config
+        * @return the executor config json
+        * @throws Exception the exception
+        */
+       public static String getExecutorConfigJson(JobConfigBean jobConfig) 
throws Exception {
+               String exeConfigJson = null;
+               try {
+                       String template = 
IOUtils.toString(AuroraThriftClientUtil.class.getClassLoader()
+                                       
.getResourceAsStream("executor-config-template.json"), 
Charset.defaultCharset());
+                       
+                       JSONObject exeConfig = new JSONObject(template);
+                       if(exeConfig != null) {
+                               exeConfig.put("environment", 
jobConfig.getJob().getEnvironment());
+                               exeConfig.put("name", 
jobConfig.getJob().getName());
+                               exeConfig.put("role", 
jobConfig.getJob().getRole());
+                               exeConfig.put("cluster", 
jobConfig.getCluster());
+                               exeConfig.put("max_task_failures", 
jobConfig.getMaxTaskFailures());
+                               exeConfig.put("service", jobConfig.isService());
+                               
+                               exeConfig.getJSONObject("task").put("name", 
jobConfig.getTaskConfig().getTaskName());
+                               
+                               // add task resources
+                               
exeConfig.getJSONObject("task").getJSONObject("resources")
+                                       .put("cpu", 
jobConfig.getTaskConfig().getResources().getNumCpus());
+                               
+                               
exeConfig.getJSONObject("task").getJSONObject("resources")
+                               .put("disk", 
jobConfig.getTaskConfig().getResources().getDiskMb() * 1024 * 1024);
+                               
+                               
exeConfig.getJSONObject("task").getJSONObject("resources")
+                               .put("ram", 
jobConfig.getTaskConfig().getResources().getRamMb() * 1024 * 1024);
+                               
+                               // iterate over all processes
+                               for(ProcessBean process : 
jobConfig.getTaskConfig().getProcesses()) {
+                                       // add process to constraints
+                                       exeConfig.getJSONObject("task")
+                                               .getJSONArray("constraints")
+                                               .getJSONObject(0)
+                                               
.getJSONArray("order").put(process.getName());
+                                       
+                                       // define the process json
+                                       JSONObject processJson = new 
JSONObject();
+                                       processJson.put("final", 
process.isFinal())
+                                               .put("daemon", 
process.isDaemon())
+                                               .put("name", process.getName())
+                                               .put("ephemeral", 
process.isEphemeral())
+                                               .put("max_failures", 
process.getMax_failures())
+                                               .put("min_duration", 
process.getMin_duration())
+                                               .put("cmdline", 
process.getCmdLine());
+                                       
+                                       // add process json to list
+                                       exeConfig.getJSONObject("task")
+                                       
.getJSONArray("processes").put(processJson);
+                               }
+                               
+                               // convert json object to string
+                               exeConfigJson = exeConfig.toString();
+                       }
+               } catch(Exception ex) {
+                       logger.error(ex.getMessage(), ex);
+                       throw ex;
+               }
+               return exeConfigJson;
+       }
+       
+       /**
+        * Gets the resource set.
+        *
+        * @param resources the resources
+        * @return the resource set
+        * @throws Exception the exception
+        */
+       public static Set<Resource> getResourceSet(ResourceBean resources) 
throws Exception {
+               Set<Resource> resourceSet = new HashSet<>();
+               
+               try {
+                       if(resources != null) {
+                               // add numCpus
+                               Resource resource = new Resource();
+                               resource.setNumCpus(resources.getNumCpus());
+                               resourceSet.add(resource);
+                               
+                               // add diskMb
+                               resource = new Resource();
+                               resource.setDiskMb(resources.getDiskMb());
+                               resourceSet.add(resource);
+                               
+                               // add ramMb
+                               resource = new Resource();
+                               resource.setRamMb(resources.getRamMb());
+                               resourceSet.add(resource);
+                       } else {
+                               throw new Exception("Resource Bean is NULL!");
+                       }
+               } catch(Exception ex) {
+                       logger.error(ex.getMessage(), ex);
+                       throw ex;
+               }
+               
+               return resourceSet;
+       }
+       
+       /**
+        * Gets the executor config.
+        *
+        * @param exeConfigJson the exe config json
+        * @return the executor config
+        * @throws Exception the exception
+        */
+       public static ExecutorConfig getExecutorConfig(String exeConfigJson) 
throws Exception {
+               ExecutorConfig exeConfig = null;
+               
+               try {
+                       
properties.load(AuroraClientSample.class.getClassLoader().getResourceAsStream(Constants.AURORA_SCHEDULER_PROP_FILE));
+                       String executorName = 
properties.getProperty(Constants.AURORA_EXECUTOR_NAME);
+                       
+                       // create the executor config
+                       if(exeConfigJson != null) {
+                               exeConfig = new ExecutorConfig(executorName, 
exeConfigJson);
+                       } else {
+                               throw new Exception("Aurora Executor Config 
Data is NULL!");
+                       }
+               } catch(Exception ex) {
+                       logger.error(ex.getMessage(), ex);
+                       throw ex;
+               }
+               
+               return exeConfig;
+       }
+       
+       /**
+        * Gets the aurora job key.
+        *
+        * @param jobKeyBean the job key bean
+        * @return the aurora job key
+        * @throws Exception the exception
+        */
+       public static JobKey getAuroraJobKey(JobKeyBean jobKeyBean) throws 
Exception {
+               JobKey jobKey = null;
+               
+               try {
+                       if(jobKeyBean != null) {
+                               jobKey = new JobKey(jobKeyBean.getRole(), 
+                                               jobKeyBean.getEnvironment(), 
+                                               jobKeyBean.getName());
+                       } else {
+                               throw new Exception("JobKey Bean is NULL!");
+                       }
+               } catch(Exception ex) {
+                       logger.error(ex.getMessage(), ex);
+                       throw ex;
+               }
+               
+               return jobKey;
+       }
+       
+       /**
+        * Gets the aurora identity.
+        *
+        * @param identityBean the identity bean
+        * @return the aurora identity
+        * @throws Exception the exception
+        */
+       public static Identity getAuroraIdentity(IdentityBean identityBean) 
throws Exception {
+               Identity owner = null;
+               
+               try {
+                       if(identityBean != null) {
+                               owner = new Identity(identityBean.getUser());
+                       } else {
+                               throw new Exception("Identity Bean is NULL!");
+                       }
+               } catch(Exception ex) {
+                       logger.error(ex.getMessage(), ex);
+                       throw ex;
+               }
+               
+               return owner;
+       }
+       
+       /**
+        * Gets the aurora job config.
+        *
+        * @param jobConfigBean the job config bean
+        * @return the aurora job config
+        * @throws Exception the exception
+        */
+       public static JobConfiguration getAuroraJobConfig(JobConfigBean 
jobConfigBean) throws Exception {
+               JobConfiguration jobConfig = null;
+               
+               try {
+                       if(jobConfigBean != null && 
+                                       jobConfigBean.getTaskConfig() != null) {
+                               
+                               JobKey jobKey = 
getAuroraJobKey(jobConfigBean.getJob());
+                               Identity owner = 
getAuroraIdentity(jobConfigBean.getOwner());
+                               // Construct the task config
+                               TaskConfig taskConfig = new TaskConfig();
+                               taskConfig.setJob(jobKey);
+                               taskConfig.setOwner(owner);
+                               
taskConfig.setIsService(jobConfigBean.isService()); 
+                               
taskConfig.setNumCpus(jobConfigBean.getTaskConfig().getResources().getNumCpus());
 
+                               
taskConfig.setRamMb(jobConfigBean.getTaskConfig().getResources().getRamMb());
+                               
taskConfig.setDiskMb(jobConfigBean.getTaskConfig().getResources().getDiskMb()); 
+                               
taskConfig.setPriority(jobConfigBean.getPriority());
+                               
taskConfig.setMaxTaskFailures(jobConfigBean.getMaxTaskFailures()); 
+                               
taskConfig.setResources(getResourceSet(jobConfigBean.getTaskConfig().getResources()));
+                               
+                               // construct the executor config for this job
+                               
taskConfig.setExecutorConfig(getExecutorConfig(getExecutorConfigJson(jobConfigBean)));
+                               
+                               // construct the job configuration
+                               jobConfig = new JobConfiguration(jobKey, 
+                                               owner, null, taskConfig, 
jobConfigBean.getInstances());
+                               
+                       } else {
+                               throw new Exception("JobConfig, TaskConfig Bean 
is/are NULL!");
+                       }
+               } catch(Exception ex) {
+                       logger.error(ex.getMessage(), ex);
+                       throw ex;
+               }
+               
+               return jobConfig;
+       }
+       
+       /**
+        * Gets the response bean.
+        *
+        * @param response the response
+        * @param resultType the result type
+        * @return the response bean
+        */
+       public static ResponseBean getResponseBean(Response response, 
ResponseResultType resultType) {
+               switch (resultType) {
+                       case GET_JOB_DETAILS:
+                               return getJobDetailsResponseBean(response);
+                       case GET_PENDING_JOB_REASON:
+                               return getPendingJobReasonBean(response);
+                       default:
+                               return getJobResponse(response);
+               }
+       }
+       
+       /**
+        * Gets the job details response bean.
+        *
+        * @param response the response
+        * @return the job details response bean
+        */
+       private static JobDetailsResponseBean 
getJobDetailsResponseBean(Response response) {
+               JobDetailsResponseBean responseBean = null;
+               if(response != null) {
+                       responseBean = new 
JobDetailsResponseBean(getJobResponse(response));
+                       
responseBean.setTasks(response.getResult().getScheduleStatusResult().getTasks());
+               }
+               
+               return responseBean;
+       }
+       
+       /**
+        * Gets the pending job reason bean.
+        *
+        * @param response the response
+        * @return the pending job reason bean
+        */
+       private static PendingJobReasonBean getPendingJobReasonBean(Response 
response) {
+               PendingJobReasonBean responseBean = null;
+               if(response != null) {
+                       responseBean = new 
PendingJobReasonBean(getJobResponse(response));
+                       
responseBean.setReasons(response.getResult().getGetPendingReasonResult().getReasons());
+               }
+               
+               return responseBean;
+       }
+       
+       /**
+        * Gets the job response.
+        *
+        * @param response the response
+        * @return the job response
+        */
+       private static ResponseBean getJobResponse(Response response) {
+               ResponseBean responseBean = null;
+               if(response != null) {
+                       responseBean = new ResponseBean();
+                       responseBean.setResponseCode(ResponseCodeEnum
+                                       
.findByValue(response.getResponseCode().getValue()));
+                       
+                       ServerInfoBean serverInfo = new 
ServerInfoBean(response.getServerInfo().getClusterName(), 
+                                       
response.getServerInfo().getStatsUrlPrefix()); 
+                       responseBean.setServerInfo(serverInfo);
+               }
+               
+               return responseBean;
+       }
+       
+       /**
+        * The main method.
+        *
+        * @param args the arguments
+        * @throws Exception the exception
+        */
+//     public static void main(String[] args) throws Exception {
+//             JobKeyBean jobKey = new JobKeyBean("devel", "centos", 
"test_job");
+//             IdentityBean owner = new IdentityBean("centos");
+//             
+//             ProcessBean proc1 = new ProcessBean("process_1", "echo 
'hello_world_1'", false);
+//             ProcessBean proc2 = new ProcessBean("process_2", "echo 
'hello_world_2'", false);
+//             Set<ProcessBean> processes = new HashSet<>();
+//             processes.add(proc1);
+//             processes.add(proc2);
+//             
+//             ResourceBean resources = new ResourceBean(0.1, 8, 1);
+//             
+//             TaskConfigBean taskConfig = new 
TaskConfigBean("task_hello_world", processes, resources);
+//             JobConfigBean jobConfig = new JobConfigBean(jobKey, owner, 
taskConfig, "example");
+//             
+//             String executorConfigJson = getExecutorConfigJson(jobConfig);
+//             System.out.println(executorConfigJson);
+//     }
+}

http://git-wip-us.apache.org/repos/asf/airavata/blob/0f781b2e/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/Constants.java
----------------------------------------------------------------------
diff --git 
a/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/Constants.java
 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/Constants.java
new file mode 100644
index 0000000..b1227f4
--- /dev/null
+++ 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/Constants.java
@@ -0,0 +1,25 @@
+package org.apache.airavata.cloud.aurora.util;
+
/**
 * Property keys and connection settings shared by the Aurora client classes.
 *
 * <p>Utility holder of constants; not meant to be instantiated.
 */
public class Constants {

    /** Classpath name of the Aurora scheduler properties file. */
    public static final String AURORA_SCHEDULER_PROP_FILE = "aurora-scheduler.properties";

    /** Property key for the Aurora scheduler host. */
    public static final String AURORA_SCHEDULER_HOST = "aurora.scheduler.host";

    /** Property key for the Aurora scheduler port. */
    public static final String AURORA_SCHEDULER_PORT = "aurora.scheduler.port";

    /** Property key for the Aurora executor name. */
    public static final String AURORA_EXECUTOR_NAME = "aurora.executor.name";

    /** Property key for the Mesos cluster name. */
    public static final String MESOS_CLUSTER_NAME = "mesos.cluster.name";

    /**
     * MessageFormat-style pattern for the scheduler API endpoint.
     * Placeholders {0} and {1} are presumably host and port — confirm at call site.
     * (Original had a stray second semicolon after this literal; removed.)
     */
    public static final String AURORA_SCHEDULER_CONNECTION_URL = "http://{0}:{1}/api";

    /** Prevents instantiation of this constants-only class. */
    private Constants() {
    }
}

http://git-wip-us.apache.org/repos/asf/airavata/blob/0f781b2e/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/ResponseCodeEnum.java
----------------------------------------------------------------------
diff --git 
a/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/ResponseCodeEnum.java
 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/ResponseCodeEnum.java
new file mode 100644
index 0000000..822829b
--- /dev/null
+++ 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/ResponseCodeEnum.java
@@ -0,0 +1,76 @@
+package org.apache.airavata.cloud.aurora.util;
+
/**
 * Local mirror of the Aurora Thrift response codes, keyed by their numeric value.
 */
public enum ResponseCodeEnum {

    /** Request was malformed. */
    INVALID_REQUEST(0),

    /** Request succeeded. */
    OK(1),

    /** Request failed with an error. */
    ERROR(2),

    /** Request succeeded with a warning. */
    WARNING(3),

    /** Authentication failed. */
    AUTH_FAILED(4),

    /** A lock could not be acquired. */
    LOCK_ERROR(5),

    /** A transient error occurred; the request may be retried. */
    ERROR_TRANSIENT(6);

    /** Numeric wire value backing this code. */
    private final int value;

    /**
     * Creates a code bound to its numeric wire value.
     *
     * @param value the numeric value
     */
    private ResponseCodeEnum(int value) {
        this.value = value;
    }

    /**
     * Returns the numeric wire value of this code.
     *
     * @return the value
     */
    public int getValue() {
        return value;
    }

    /**
     * Looks up the code matching a numeric value.
     *
     * @param value the numeric value
     * @return the matching code, or null if no code has that value
     */
    public static ResponseCodeEnum findByValue(int value) {
        // Linear scan over the constants replaces the hand-written switch;
        // values are unique, so the first match is the only match.
        for (ResponseCodeEnum code : values()) {
            if (code.value == value) {
                return code;
            }
        }
        return null;
    }
}

http://git-wip-us.apache.org/repos/asf/airavata/blob/0f781b2e/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/ResponseResultType.java
----------------------------------------------------------------------
diff --git 
a/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/ResponseResultType.java
 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/ResponseResultType.java
new file mode 100644
index 0000000..333c160
--- /dev/null
+++ 
b/modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/util/ResponseResultType.java
@@ -0,0 +1,28 @@
+package org.apache.airavata.cloud.aurora.util;
+
/**
 * Kinds of Aurora operation results a {@code Response} can be mapped to;
 * used to pick the matching response-bean conversion.
 */
public enum ResponseResultType {

    /** Result of creating a job. */
    CREATE_JOB,

    /** Result of listing jobs. */
    GET_JOBS,

    /** Result of fetching a job summary. */
    GET_JOB_SUMMARY,

    /** Result of fetching pending reasons. */
    GET_PENDING_REASON,

    /** Result of fetching full job details. */
    GET_JOB_DETAILS,

    /** Result of fetching the reason a job is pending. */
    GET_PENDING_JOB_REASON,

    /** Result of killing tasks. */
    KILL_TASKS
}

Reply via email to