http://git-wip-us.apache.org/repos/asf/hive/blob/c5b4d66d/llap-common/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
----------------------------------------------------------------------
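A note on the change reflected in the regenerated code below: the standalone VertexIdentifier message is removed, and SignableVertexSpec now references a QueryIdentifierProto (field 3) and adds hive_query_id (string, field 4) and vertex_index (int32, field 7), with the remaining fields renumbered (dag_name 4->5, vertex_name 5->6, token_identifier 6->8, processor_descriptor 7->9, input_specs 8->10, output_specs 9->11, grouped_input_specs 10->12, vertex_parallelism 11->13). As a rough, non-authoritative sketch of how a caller might populate the revised message, assuming the standard protobuf 2.x builder methods generated for these fields (the helper class and literal values below are illustrative only, not part of this patch):

    import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto;
    import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec;

    // Illustrative sketch only: exercises the new/renumbered SignableVertexSpec fields.
    public class SignableVertexSpecSketch {
      public static SignableVertexSpec buildSpec(QueryIdentifierProto queryId) {
        return SignableVertexSpec.newBuilder()
            .setQueryIdentifier(queryId)        // field 3: replaces the removed VertexIdentifier message
            .setHiveQueryId("hive-query-id")    // field 4: new string field
            .setDagName("dag_name")             // field 5 (was 4)
            .setVertexName("Map 1")             // field 6 (was 5)
            .setVertexIndex(0)                  // field 7: new int32 field
            .setTokenIdentifier("token")        // field 8 (was 6)
            .build();
      }
    }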
diff --git a/llap-common/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java b/llap-common/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
index 56a1361..0581681 100644
--- a/llap-common/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
+++ b/llap-common/src/gen/protobuf/gen-java/org/apache/hadoop/hive/llap/daemon/rpc/LlapDaemonProtocolProtos.java
@@ -1,5 +1,5 @@
 // Generated by the protocol buffer compiler.  DO NOT EDIT!
-// source: LlapDaemonProtocol.proto
+// source: src/protobuf/LlapDaemonProtocol.proto
 
 package org.apache.hadoop.hive.llap.daemon.rpc;
 
@@ -3212,785 +3212,6 @@ public final class LlapDaemonProtocolProtos {
     // @@protoc_insertion_point(class_scope:GroupInputSpecProto)
   }
 
-  public interface VertexIdentifierOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // optional string application_id_string = 1;
-    /**
-     * <code>optional string application_id_string = 1;</code>
-     */
-    boolean hasApplicationIdString();
-    /**
-     * <code>optional string application_id_string = 1;</code>
-     */
-    java.lang.String getApplicationIdString();
-    /**
-     * <code>optional string application_id_string = 1;</code>
-     */
-    com.google.protobuf.ByteString
-        getApplicationIdStringBytes();
-
-    // optional int32 app_attempt_number = 2;
-    /**
-     * <code>optional int32 app_attempt_number = 2;</code>
-     */
-    boolean hasAppAttemptNumber();
-    /**
-     * <code>optional int32 app_attempt_number = 2;</code>
-     */
-    int getAppAttemptNumber();
-
-    // optional int32 dag_id = 3;
-    /**
-     * <code>optional int32 dag_id = 3;</code>
-     */
-    boolean hasDagId();
-    /**
-     * <code>optional int32 dag_id = 3;</code>
-     */
-    int getDagId();
-
-    // optional int32 vertex_id = 4;
-    /**
-     * <code>optional int32 vertex_id = 4;</code>
-     */
-    boolean hasVertexId();
-    /**
-     * <code>optional int32 vertex_id = 4;</code>
-     */
-    int getVertexId();
-  }
-  /**
-   * Protobuf type {@code VertexIdentifier}
-   */
-  public static final class VertexIdentifier extends
-      com.google.protobuf.GeneratedMessage
-      implements VertexIdentifierOrBuilder {
-    // Use VertexIdentifier.newBuilder() to construct.
-    private VertexIdentifier(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
-      super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private VertexIdentifier(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final VertexIdentifier defaultInstance;
-    public static VertexIdentifier getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public VertexIdentifier getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private VertexIdentifier(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              applicationIdString_ = input.readBytes();
-              break;
-            }
-            case 16: {
-              bitField0_ |= 0x00000002;
-              appAttemptNumber_ = input.readInt32();
-              break;
-            }
-            case 24: {
-              bitField0_ |= 0x00000004;
-              dagId_ = input.readInt32();
-              break;
-            }
-            case 32: {
-              bitField0_ |= 0x00000008;
-              vertexId_ = input.readInt32();
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexIdentifier_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexIdentifier_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder.class);
-    }
-
-    public static com.google.protobuf.Parser<VertexIdentifier> PARSER =
-        new com.google.protobuf.AbstractParser<VertexIdentifier>() {
-      public VertexIdentifier parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new VertexIdentifier(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<VertexIdentifier> getParserForType() {
-      return PARSER;
-    }
-
-    private int bitField0_;
-    // optional string application_id_string = 1;
-    public static final int APPLICATION_ID_STRING_FIELD_NUMBER = 1;
-    private java.lang.Object applicationIdString_;
-    /**
-     * <code>optional string application_id_string = 1;</code>
-     */
-    public boolean hasApplicationIdString() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    /**
-     * <code>optional string application_id_string = 1;</code>
-     */
-    public java.lang.String getApplicationIdString() {
-      java.lang.Object ref = applicationIdString_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        com.google.protobuf.ByteString bs = 
-            (com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          applicationIdString_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * <code>optional string application_id_string = 1;</code>
-     */
-    public com.google.protobuf.ByteString
-        getApplicationIdStringBytes() {
-      java.lang.Object ref = applicationIdString_;
-      if (ref instanceof java.lang.String) {
-        com.google.protobuf.ByteString b = 
-            com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        applicationIdString_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    // optional int32 app_attempt_number = 2;
-    public static final int APP_ATTEMPT_NUMBER_FIELD_NUMBER = 2;
-    private int appAttemptNumber_;
-    /**
-     * <code>optional int32 app_attempt_number = 2;</code>
-     */
-    public boolean hasAppAttemptNumber() {
-      return ((bitField0_ & 0x00000002) == 0x00000002);
-    }
-    /**
-     * <code>optional int32 app_attempt_number = 2;</code>
-     */
-    public int getAppAttemptNumber() {
-      return appAttemptNumber_;
-    }
-
-    // optional int32 dag_id = 3;
-    public static final int DAG_ID_FIELD_NUMBER = 3;
-    private int dagId_;
-    /**
-     * <code>optional int32 dag_id = 3;</code>
-     */
-    public boolean hasDagId() {
-      return ((bitField0_ & 0x00000004) == 0x00000004);
-    }
-    /**
-     * <code>optional int32 dag_id = 3;</code>
-     */
-    public int getDagId() {
-      return dagId_;
-    }
-
-    // optional int32 vertex_id = 4;
-    public static final int VERTEX_ID_FIELD_NUMBER = 4;
-    private int vertexId_;
-    /**
-     * <code>optional int32 vertex_id = 4;</code>
-     */
-    public boolean hasVertexId() {
-      return ((bitField0_ & 0x00000008) == 0x00000008);
-    }
-    /**
-     * <code>optional int32 vertex_id = 4;</code>
-     */
-    public int getVertexId() {
-      return vertexId_;
-    }
-
-    private void initFields() {
-      applicationIdString_ = "";
-      appAttemptNumber_ = 0;
-      dagId_ = 0;
-      vertexId_ = 0;
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-                        throws java.io.IOException {
-      getSerializedSize();
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeBytes(1, getApplicationIdStringBytes());
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        output.writeInt32(2, appAttemptNumber_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeInt32(3, dagId_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        output.writeInt32(4, vertexId_);
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(1, getApplicationIdStringBytes());
-      }
-      if (((bitField0_ & 0x00000002) == 0x00000002)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt32Size(2, appAttemptNumber_);
-      }
-      if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt32Size(3, dagId_);
-      }
-      if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeInt32Size(4, vertexId_);
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    private static final long serialVersionUID = 0L;
-    @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
-    public boolean equals(final java.lang.Object obj) {
-      if (obj == this) {
-       return true;
-      }
-      if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier)) {
-        return super.equals(obj);
-      }
-      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier) obj;
-
-      boolean result = true;
-      result = result && (hasApplicationIdString() == other.hasApplicationIdString());
-      if (hasApplicationIdString()) {
-        result = result && getApplicationIdString()
-            .equals(other.getApplicationIdString());
-      }
-      result = result && (hasAppAttemptNumber() == other.hasAppAttemptNumber());
-      if (hasAppAttemptNumber()) {
-        result = result && (getAppAttemptNumber()
-            == other.getAppAttemptNumber());
-      }
-      result = result && (hasDagId() == other.hasDagId());
-      if (hasDagId()) {
-        result = result && (getDagId()
-            == other.getDagId());
-      }
-      result = result && (hasVertexId() == other.hasVertexId());
-      if (hasVertexId()) {
-        result = result && (getVertexId()
-            == other.getVertexId());
-      }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
-      return result;
-    }
-
-    private int memoizedHashCode = 0;
-    @java.lang.Override
-    public int hashCode() {
-      if (memoizedHashCode != 0) {
-        return memoizedHashCode;
-      }
-      int hash = 41;
-      hash = (19 * hash) + getDescriptorForType().hashCode();
-      if (hasApplicationIdString()) {
-        hash = (37 * hash) + APPLICATION_ID_STRING_FIELD_NUMBER;
-        hash = (53 * hash) + getApplicationIdString().hashCode();
-      }
-      if (hasAppAttemptNumber()) {
-        hash = (37 * hash) + APP_ATTEMPT_NUMBER_FIELD_NUMBER;
-        hash = (53 * hash) + getAppAttemptNumber();
-      }
-      if (hasDagId()) {
-        hash = (37 * hash) + DAG_ID_FIELD_NUMBER;
-        hash = (53 * hash) + getDagId();
-      }
-      if (hasVertexId()) {
-        hash = (37 * hash) + VERTEX_ID_FIELD_NUMBER;
-        hash = (53 * hash) + getVertexId();
-      }
-      hash = (29 * hash) + getUnknownFields().hashCode();
-      memoizedHashCode = hash;
-      return hash;
-    }
-
-    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input);
-    }
-    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
-    }
-    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom(
-        com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input);
-    }
-    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parseFrom(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
-    }
-
-    public static Builder newBuilder() { return Builder.create(); }
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier prototype) {
-      return newBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() { return newBuilder(this); }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    /**
-     * Protobuf type {@code VertexIdentifier}
-     */
-    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifierOrBuilder {
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexIdentifier_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexIdentifier_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder.class);
-      }
-
-      // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-        }
-      }
-      private static Builder create() {
-        return new Builder();
-      }
-
-      public Builder clear() {
-        super.clear();
-        applicationIdString_ = "";
-        bitField0_ = (bitField0_ & ~0x00000001);
-        appAttemptNumber_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000002);
-        dagId_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000004);
-        vertexId_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000008);
-        return this;
-      }
-
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
-      public com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexIdentifier_descriptor;
-      }
-
-      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier getDefaultInstanceForType() {
-        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance();
-      }
-
-      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier build() {
-        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier buildPartial() {
-        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.applicationIdString_ = applicationIdString_;
-        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
-          to_bitField0_ |= 0x00000002;
-        }
-        result.appAttemptNumber_ = appAttemptNumber_;
-        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
-          to_bitField0_ |= 0x00000004;
-        }
-        result.dagId_ = dagId_;
-        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
-          to_bitField0_ |= 0x00000008;
-        }
-        result.vertexId_ = vertexId_;
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier) {
-          return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier other) {
-        if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance()) return this;
-        if (other.hasApplicationIdString()) {
-          bitField0_ |= 0x00000001;
-          applicationIdString_ = other.applicationIdString_;
-          onChanged();
-        }
-        if (other.hasAppAttemptNumber()) {
-          setAppAttemptNumber(other.getAppAttemptNumber());
-        }
-        if (other.hasDagId()) {
-          setDagId(other.getDagId());
-        }
-        if (other.hasVertexId()) {
-          setVertexId(other.getVertexId());
-        }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier) e.getUnfinishedMessage();
-          throw e;
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
-          }
-        }
-        return this;
-      }
-      private int bitField0_;
-
-      // optional string application_id_string = 1;
-      private java.lang.Object applicationIdString_ = "";
-      /**
-       * <code>optional string application_id_string = 1;</code>
-       */
-      public boolean hasApplicationIdString() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      /**
-       * <code>optional string application_id_string = 1;</code>
-       */
-      public java.lang.String getApplicationIdString() {
-        java.lang.Object ref = applicationIdString_;
-        if (!(ref instanceof java.lang.String)) {
-          java.lang.String s = ((com.google.protobuf.ByteString) ref)
-              .toStringUtf8();
-          applicationIdString_ = s;
-          return s;
-        } else {
-          return (java.lang.String) ref;
-        }
-      }
-      /**
-       * <code>optional string application_id_string = 1;</code>
-       */
-      public com.google.protobuf.ByteString
-          getApplicationIdStringBytes() {
-        java.lang.Object ref = applicationIdString_;
-        if (ref instanceof String) {
-          com.google.protobuf.ByteString b = 
-              com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
-          applicationIdString_ = b;
-          return b;
-        } else {
-          return (com.google.protobuf.ByteString) ref;
-        }
-      }
-      /**
-       * <code>optional string application_id_string = 1;</code>
-       */
-      public Builder setApplicationIdString(
-          java.lang.String value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000001;
-        applicationIdString_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional string application_id_string = 1;</code>
-       */
-      public Builder clearApplicationIdString() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        applicationIdString_ = getDefaultInstance().getApplicationIdString();
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional string application_id_string = 1;</code>
-       */
-      public Builder setApplicationIdStringBytes(
-          com.google.protobuf.ByteString value) {
-        if (value == null) {
-    throw new NullPointerException();
-  }
-  bitField0_ |= 0x00000001;
-        applicationIdString_ = value;
-        onChanged();
-        return this;
-      }
-
-      // optional int32 app_attempt_number = 2;
-      private int appAttemptNumber_ ;
-      /**
-       * <code>optional int32 app_attempt_number = 2;</code>
-       */
-      public boolean hasAppAttemptNumber() {
-        return ((bitField0_ & 0x00000002) == 0x00000002);
-      }
-      /**
-       * <code>optional int32 app_attempt_number = 2;</code>
-       */
-      public int getAppAttemptNumber() {
-        return appAttemptNumber_;
-      }
-      /**
-       * <code>optional int32 app_attempt_number = 2;</code>
-       */
-      public Builder setAppAttemptNumber(int value) {
-        bitField0_ |= 0x00000002;
-        appAttemptNumber_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional int32 app_attempt_number = 2;</code>
-       */
-      public Builder clearAppAttemptNumber() {
-        bitField0_ = (bitField0_ & ~0x00000002);
-        appAttemptNumber_ = 0;
-        onChanged();
-        return this;
-      }
-
-      // optional int32 dag_id = 3;
-      private int dagId_ ;
-      /**
-       * <code>optional int32 dag_id = 3;</code>
-       */
-      public boolean hasDagId() {
-        return ((bitField0_ & 0x00000004) == 0x00000004);
-      }
-      /**
-       * <code>optional int32 dag_id = 3;</code>
-       */
-      public int getDagId() {
-        return dagId_;
-      }
-      /**
-       * <code>optional int32 dag_id = 3;</code>
-       */
-      public Builder setDagId(int value) {
-        bitField0_ |= 0x00000004;
-        dagId_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional int32 dag_id = 3;</code>
-       */
-      public Builder clearDagId() {
-        bitField0_ = (bitField0_ & ~0x00000004);
-        dagId_ = 0;
-        onChanged();
-        return this;
-      }
-
-      // optional int32 vertex_id = 4;
-      private int vertexId_ ;
-      /**
-       * <code>optional int32 vertex_id = 4;</code>
-       */
-      public boolean hasVertexId() {
-        return ((bitField0_ & 0x00000008) == 0x00000008);
-      }
-      /**
-       * <code>optional int32 vertex_id = 4;</code>
-       */
-      public int getVertexId() {
-        return vertexId_;
-      }
-      /**
-       * <code>optional int32 vertex_id = 4;</code>
-       */
-      public Builder setVertexId(int value) {
-        bitField0_ |= 0x00000008;
-        vertexId_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>optional int32 vertex_id = 4;</code>
-       */
-      public Builder clearVertexId() {
-        bitField0_ = (bitField0_ & ~0x00000008);
-        vertexId_ = 0;
-        onChanged();
-        return this;
-      }
-
-      // @@protoc_insertion_point(builder_scope:VertexIdentifier)
-    }
-
-    static {
-      defaultInstance = new VertexIdentifier(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:VertexIdentifier)
-  }
-
   public interface SignableVertexSpecOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
 
@@ -4019,23 +3240,38 @@ public final class LlapDaemonProtocolProtos {
      */
     long getSignatureKeyId();
 
-    // optional .VertexIdentifier vertexIdentifier = 3;
+    // optional .QueryIdentifierProto query_identifier = 3;
+    /**
+     * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
+     */
+    boolean hasQueryIdentifier();
+    /**
+     * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
+     */
+    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
+    /**
+     * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
+     */
+    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();
+
+    // optional string hive_query_id = 4;
     /**
-     * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+     * <code>optional string hive_query_id = 4;</code>
      */
-    boolean hasVertexIdentifier();
+    boolean hasHiveQueryId();
     /**
-     * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+     * <code>optional string hive_query_id = 4;</code>
      */
-    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier getVertexIdentifier();
+    java.lang.String getHiveQueryId();
     /**
-     * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+     * <code>optional string hive_query_id = 4;</code>
      */
-    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifierOrBuilder getVertexIdentifierOrBuilder();
+    com.google.protobuf.ByteString
+        getHiveQueryIdBytes();
 
-    // optional string dag_name = 4;
+    // optional string dag_name = 5;
     /**
-     * <code>optional string dag_name = 4;</code>
+     * <code>optional string dag_name = 5;</code>
      *
      * <pre>
     * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
@@ -4043,7 +3279,7 @@ public final class LlapDaemonProtocolProtos {
      */
     boolean hasDagName();
     /**
-     * <code>optional string dag_name = 4;</code>
+     * <code>optional string dag_name = 5;</code>
      *
      * <pre>
     * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
@@ -4051,7 +3287,7 @@ public final class LlapDaemonProtocolProtos {
      */
     java.lang.String getDagName();
     /**
-     * <code>optional string dag_name = 4;</code>
+     * <code>optional string dag_name = 5;</code>
      *
      * <pre>
     * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
@@ -4060,24 +3296,34 @@ public final class LlapDaemonProtocolProtos {
     com.google.protobuf.ByteString
         getDagNameBytes();
 
-    // optional string vertex_name = 5;
+    // optional string vertex_name = 6;
     /**
-     * <code>optional string vertex_name = 5;</code>
+     * <code>optional string vertex_name = 6;</code>
      */
     boolean hasVertexName();
     /**
-     * <code>optional string vertex_name = 5;</code>
+     * <code>optional string vertex_name = 6;</code>
      */
     java.lang.String getVertexName();
     /**
-     * <code>optional string vertex_name = 5;</code>
+     * <code>optional string vertex_name = 6;</code>
      */
     com.google.protobuf.ByteString
         getVertexNameBytes();
 
-    // optional string token_identifier = 6;
+    // optional int32 vertex_index = 7;
+    /**
+     * <code>optional int32 vertex_index = 7;</code>
+     */
+    boolean hasVertexIndex();
+    /**
+     * <code>optional int32 vertex_index = 7;</code>
+     */
+    int getVertexIndex();
+
+    // optional string token_identifier = 8;
     /**
-     * <code>optional string token_identifier = 6;</code>
+     * <code>optional string token_identifier = 8;</code>
      *
      * <pre>
      * The core vertex stuff 
@@ -4085,7 +3331,7 @@ public final class LlapDaemonProtocolProtos {
      */
     boolean hasTokenIdentifier();
     /**
-     * <code>optional string token_identifier = 6;</code>
+     * <code>optional string token_identifier = 8;</code>
      *
      * <pre>
      * The core vertex stuff 
@@ -4093,7 +3339,7 @@ public final class LlapDaemonProtocolProtos {
      */
     java.lang.String getTokenIdentifier();
     /**
-     * <code>optional string token_identifier = 6;</code>
+     * <code>optional string token_identifier = 8;</code>
      *
      * <pre>
      * The core vertex stuff 
@@ -4102,98 +3348,98 @@ public final class LlapDaemonProtocolProtos {
     com.google.protobuf.ByteString
         getTokenIdentifierBytes();
 
-    // optional .EntityDescriptorProto processor_descriptor = 7;
+    // optional .EntityDescriptorProto processor_descriptor = 9;
     /**
-     * <code>optional .EntityDescriptorProto processor_descriptor = 7;</code>
+     * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
      */
     boolean hasProcessorDescriptor();
     /**
-     * <code>optional .EntityDescriptorProto processor_descriptor = 7;</code>
+     * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
      */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getProcessorDescriptor();
     /**
-     * <code>optional .EntityDescriptorProto processor_descriptor = 7;</code>
+     * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
      */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getProcessorDescriptorOrBuilder();
 
-    // repeated .IOSpecProto input_specs = 8;
+    // repeated .IOSpecProto input_specs = 10;
     /**
-     * <code>repeated .IOSpecProto input_specs = 8;</code>
+     * <code>repeated .IOSpecProto input_specs = 10;</code>
      */
    java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> 
        getInputSpecsList();
     /**
-     * <code>repeated .IOSpecProto input_specs = 8;</code>
+     * <code>repeated .IOSpecProto input_specs = 10;</code>
      */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getInputSpecs(int index);
     /**
-     * <code>repeated .IOSpecProto input_specs = 8;</code>
+     * <code>repeated .IOSpecProto input_specs = 10;</code>
      */
     int getInputSpecsCount();
     /**
-     * <code>repeated .IOSpecProto input_specs = 8;</code>
+     * <code>repeated .IOSpecProto input_specs = 10;</code>
      */
    java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> 
        getInputSpecsOrBuilderList();
     /**
-     * <code>repeated .IOSpecProto input_specs = 8;</code>
+     * <code>repeated .IOSpecProto input_specs = 10;</code>
      */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getInputSpecsOrBuilder(
        int index);
 
-    // repeated .IOSpecProto output_specs = 9;
+    // repeated .IOSpecProto output_specs = 11;
     /**
-     * <code>repeated .IOSpecProto output_specs = 9;</code>
+     * <code>repeated .IOSpecProto output_specs = 11;</code>
      */
    java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> 
        getOutputSpecsList();
     /**
-     * <code>repeated .IOSpecProto output_specs = 9;</code>
+     * <code>repeated .IOSpecProto output_specs = 11;</code>
      */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getOutputSpecs(int index);
     /**
-     * <code>repeated .IOSpecProto output_specs = 9;</code>
+     * <code>repeated .IOSpecProto output_specs = 11;</code>
      */
     int getOutputSpecsCount();
     /**
-     * <code>repeated .IOSpecProto output_specs = 9;</code>
+     * <code>repeated .IOSpecProto output_specs = 11;</code>
      */
    java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> 
        getOutputSpecsOrBuilderList();
     /**
-     * <code>repeated .IOSpecProto output_specs = 9;</code>
+     * <code>repeated .IOSpecProto output_specs = 11;</code>
      */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getOutputSpecsOrBuilder(
        int index);
 
-    // repeated .GroupInputSpecProto grouped_input_specs = 10;
+    // repeated .GroupInputSpecProto grouped_input_specs = 12;
     /**
-     * <code>repeated .GroupInputSpecProto grouped_input_specs = 10;</code>
+     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
      */
    java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto> 
        getGroupedInputSpecsList();
     /**
-     * <code>repeated .GroupInputSpecProto grouped_input_specs = 10;</code>
+     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
      */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto getGroupedInputSpecs(int index);
     /**
-     * <code>repeated .GroupInputSpecProto grouped_input_specs = 10;</code>
+     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
      */
     int getGroupedInputSpecsCount();
     /**
-     * <code>repeated .GroupInputSpecProto grouped_input_specs = 10;</code>
+     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
      */
    java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder> 
        getGroupedInputSpecsOrBuilderList();
     /**
-     * <code>repeated .GroupInputSpecProto grouped_input_specs = 10;</code>
+     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
      */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder getGroupedInputSpecsOrBuilder(
        int index);
 
-    // optional int32 vertex_parallelism = 11;
+    // optional int32 vertex_parallelism = 13;
     /**
-     * <code>optional int32 vertex_parallelism = 11;</code>
+     * <code>optional int32 vertex_parallelism = 13;</code>
      *
      * <pre>
      * An internal field required for Tez.
@@ -4201,7 +3447,7 @@ public final class LlapDaemonProtocolProtos {
      */
     boolean hasVertexParallelism();
     /**
-     * <code>optional int32 vertex_parallelism = 11;</code>
+     * <code>optional int32 vertex_parallelism = 13;</code>
      *
      * <pre>
      * An internal field required for Tez.
@@ -4275,36 +3521,46 @@ public final class LlapDaemonProtocolProtos {
               break;
             }
             case 26: {
-              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder subBuilder = null;
+              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
               if (((bitField0_ & 0x00000004) == 0x00000004)) {
-                subBuilder = vertexIdentifier_.toBuilder();
+                subBuilder = queryIdentifier_.toBuilder();
               }
-              vertexIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.PARSER, extensionRegistry);
+              queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
               if (subBuilder != null) {
-                subBuilder.mergeFrom(vertexIdentifier_);
-                vertexIdentifier_ = subBuilder.buildPartial();
+                subBuilder.mergeFrom(queryIdentifier_);
+                queryIdentifier_ = subBuilder.buildPartial();
               }
               bitField0_ |= 0x00000004;
               break;
             }
             case 34: {
               bitField0_ |= 0x00000008;
-              dagName_ = input.readBytes();
+              hiveQueryId_ = input.readBytes();
               break;
             }
             case 42: {
               bitField0_ |= 0x00000010;
-              vertexName_ = input.readBytes();
+              dagName_ = input.readBytes();
               break;
             }
             case 50: {
               bitField0_ |= 0x00000020;
+              vertexName_ = input.readBytes();
+              break;
+            }
+            case 56: {
+              bitField0_ |= 0x00000040;
+              vertexIndex_ = input.readInt32();
+              break;
+            }
+            case 66: {
+              bitField0_ |= 0x00000080;
               tokenIdentifier_ = input.readBytes();
               break;
             }
-            case 58: {
+            case 74: {
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder subBuilder = null;
-              if (((bitField0_ & 0x00000040) == 0x00000040)) {
+              if (((bitField0_ & 0x00000100) == 0x00000100)) {
                subBuilder = processorDescriptor_.toBuilder();
              }
              processorDescriptor_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.PARSER, extensionRegistry);
@@ -4312,35 +3568,35 @@ public final class LlapDaemonProtocolProtos {
                 subBuilder.mergeFrom(processorDescriptor_);
                 processorDescriptor_ = subBuilder.buildPartial();
               }
-              bitField0_ |= 0x00000040;
+              bitField0_ |= 0x00000100;
               break;
             }
-            case 66: {
-              if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
+            case 82: {
+              if (!((mutable_bitField0_ & 0x00000200) == 0x00000200)) {
                inputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto>();
-                mutable_bitField0_ |= 0x00000080;
+                mutable_bitField0_ |= 0x00000200;
              }
              inputSpecs_.add(input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.PARSER, extensionRegistry));
               break;
             }
-            case 74: {
-              if (!((mutable_bitField0_ & 0x00000100) == 0x00000100)) {
+            case 90: {
+              if (!((mutable_bitField0_ & 0x00000400) == 0x00000400)) {
                outputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto>();
-                mutable_bitField0_ |= 0x00000100;
+                mutable_bitField0_ |= 0x00000400;
              }
              outputSpecs_.add(input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.PARSER, extensionRegistry));
               break;
             }
-            case 82: {
-              if (!((mutable_bitField0_ & 0x00000200) == 0x00000200)) {
+            case 98: {
+              if (!((mutable_bitField0_ & 0x00000800) == 0x00000800)) {
                groupedInputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto>();
-                mutable_bitField0_ |= 0x00000200;
+                mutable_bitField0_ |= 0x00000800;
              }
              groupedInputSpecs_.add(input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.PARSER, extensionRegistry));
               break;
             }
-            case 88: {
-              bitField0_ |= 0x00000080;
+            case 104: {
+              bitField0_ |= 0x00000200;
               vertexParallelism_ = input.readInt32();
               break;
             }
@@ -4352,13 +3608,13 @@ public final class LlapDaemonProtocolProtos {
         throw new com.google.protobuf.InvalidProtocolBufferException(
             e.getMessage()).setUnfinishedMessage(this);
       } finally {
-        if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
+        if (((mutable_bitField0_ & 0x00000200) == 0x00000200)) {
           inputSpecs_ = java.util.Collections.unmodifiableList(inputSpecs_);
         }
-        if (((mutable_bitField0_ & 0x00000100) == 0x00000100)) {
+        if (((mutable_bitField0_ & 0x00000400) == 0x00000400)) {
           outputSpecs_ = java.util.Collections.unmodifiableList(outputSpecs_);
         }
-        if (((mutable_bitField0_ & 0x00000200) == 0x00000200)) {
+        if (((mutable_bitField0_ & 0x00000800) == 0x00000800)) {
          groupedInputSpecs_ = java.util.Collections.unmodifiableList(groupedInputSpecs_);
         }
         this.unknownFields = unknownFields.build();
@@ -4452,43 +3708,86 @@ public final class LlapDaemonProtocolProtos {
       return signatureKeyId_;
     }
 
-    // optional .VertexIdentifier vertexIdentifier = 3;
-    public static final int VERTEXIDENTIFIER_FIELD_NUMBER = 3;
-    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier vertexIdentifier_;
+    // optional .QueryIdentifierProto query_identifier = 3;
+    public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 3;
+    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
     /**
-     * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+     * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
      */
-    public boolean hasVertexIdentifier() {
+    public boolean hasQueryIdentifier() {
       return ((bitField0_ & 0x00000004) == 0x00000004);
     }
     /**
-     * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+     * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
+     */
+    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
+      return queryIdentifier_;
+    }
+    /**
+     * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
+     */
+    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
+      return queryIdentifier_;
+    }
+
+    // optional string hive_query_id = 4;
+    public static final int HIVE_QUERY_ID_FIELD_NUMBER = 4;
+    private java.lang.Object hiveQueryId_;
+    /**
+     * <code>optional string hive_query_id = 4;</code>
+     */
+    public boolean hasHiveQueryId() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * <code>optional string hive_query_id = 4;</code>
      */
-    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier getVertexIdentifier() {
-      return vertexIdentifier_;
+    public java.lang.String getHiveQueryId() {
+      java.lang.Object ref = hiveQueryId_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          hiveQueryId_ = s;
+        }
+        return s;
+      }
     }
     /**
-     * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+     * <code>optional string hive_query_id = 4;</code>
      */
-    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifierOrBuilder getVertexIdentifierOrBuilder() {
-      return vertexIdentifier_;
+    public com.google.protobuf.ByteString
+        getHiveQueryIdBytes() {
+      java.lang.Object ref = hiveQueryId_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        hiveQueryId_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
     }
 
-    // optional string dag_name = 4;
-    public static final int DAG_NAME_FIELD_NUMBER = 4;
+    // optional string dag_name = 5;
+    public static final int DAG_NAME_FIELD_NUMBER = 5;
     private java.lang.Object dagName_;
     /**
-     * <code>optional string dag_name = 4;</code>
+     * <code>optional string dag_name = 5;</code>
      *
      * <pre>
     * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
      * </pre>
      */
     public boolean hasDagName() {
-      return ((bitField0_ & 0x00000008) == 0x00000008);
+      return ((bitField0_ & 0x00000010) == 0x00000010);
     }
     /**
-     * <code>optional string dag_name = 4;</code>
+     * <code>optional string dag_name = 5;</code>
      *
      * <pre>
     * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
@@ -4509,7 +3808,7 @@ public final class LlapDaemonProtocolProtos {
       }
     }
     /**
-     * <code>optional string dag_name = 4;</code>
+     * <code>optional string dag_name = 5;</code>
      *
      * <pre>
     * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
@@ -4529,17 +3828,17 @@ public final class LlapDaemonProtocolProtos {
       }
     }
 
-    // optional string vertex_name = 5;
-    public static final int VERTEX_NAME_FIELD_NUMBER = 5;
+    // optional string vertex_name = 6;
+    public static final int VERTEX_NAME_FIELD_NUMBER = 6;
     private java.lang.Object vertexName_;
     /**
-     * <code>optional string vertex_name = 5;</code>
+     * <code>optional string vertex_name = 6;</code>
      */
     public boolean hasVertexName() {
-      return ((bitField0_ & 0x00000010) == 0x00000010);
+      return ((bitField0_ & 0x00000020) == 0x00000020);
     }
     /**
-     * <code>optional string vertex_name = 5;</code>
+     * <code>optional string vertex_name = 6;</code>
      */
     public java.lang.String getVertexName() {
       java.lang.Object ref = vertexName_;
@@ -4556,7 +3855,7 @@ public final class LlapDaemonProtocolProtos {
       }
     }
     /**
-     * <code>optional string vertex_name = 5;</code>
+     * <code>optional string vertex_name = 6;</code>
      */
     public com.google.protobuf.ByteString
         getVertexNameBytes() {
@@ -4572,21 +3871,37 @@ public final class LlapDaemonProtocolProtos {
       }
     }
 
-    // optional string token_identifier = 6;
-    public static final int TOKEN_IDENTIFIER_FIELD_NUMBER = 6;
+    // optional int32 vertex_index = 7;
+    public static final int VERTEX_INDEX_FIELD_NUMBER = 7;
+    private int vertexIndex_;
+    /**
+     * <code>optional int32 vertex_index = 7;</code>
+     */
+    public boolean hasVertexIndex() {
+      return ((bitField0_ & 0x00000040) == 0x00000040);
+    }
+    /**
+     * <code>optional int32 vertex_index = 7;</code>
+     */
+    public int getVertexIndex() {
+      return vertexIndex_;
+    }
+
+    // optional string token_identifier = 8;
+    public static final int TOKEN_IDENTIFIER_FIELD_NUMBER = 8;
     private java.lang.Object tokenIdentifier_;
     /**
-     * <code>optional string token_identifier = 6;</code>
+     * <code>optional string token_identifier = 8;</code>
      *
      * <pre>
      * The core vertex stuff 
      * </pre>
      */
     public boolean hasTokenIdentifier() {
-      return ((bitField0_ & 0x00000020) == 0x00000020);
+      return ((bitField0_ & 0x00000080) == 0x00000080);
     }
     /**
-     * <code>optional string token_identifier = 6;</code>
+     * <code>optional string token_identifier = 8;</code>
      *
      * <pre>
      * The core vertex stuff 
@@ -4607,7 +3922,7 @@ public final class LlapDaemonProtocolProtos {
       }
     }
     /**
-     * <code>optional string token_identifier = 6;</code>
+     * <code>optional string token_identifier = 8;</code>
      *
      * <pre>
      * The core vertex stuff 
@@ -4627,151 +3942,151 @@ public final class LlapDaemonProtocolProtos {
       }
     }
 
-    // optional .EntityDescriptorProto processor_descriptor = 7;
-    public static final int PROCESSOR_DESCRIPTOR_FIELD_NUMBER = 7;
+    // optional .EntityDescriptorProto processor_descriptor = 9;
+    public static final int PROCESSOR_DESCRIPTOR_FIELD_NUMBER = 9;
    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto processorDescriptor_;
     /**
-     * <code>optional .EntityDescriptorProto processor_descriptor = 7;</code>
+     * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
      */
     public boolean hasProcessorDescriptor() {
-      return ((bitField0_ & 0x00000040) == 0x00000040);
+      return ((bitField0_ & 0x00000100) == 0x00000100);
     }
     /**
-     * <code>optional .EntityDescriptorProto processor_descriptor = 7;</code>
+     * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
      */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getProcessorDescriptor() {
       return processorDescriptor_;
     }
     /**
-     * <code>optional .EntityDescriptorProto processor_descriptor = 7;</code>
+     * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
      */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getProcessorDescriptorOrBuilder() {
       return processorDescriptor_;
     }
 
-    // repeated .IOSpecProto input_specs = 8;
-    public static final int INPUT_SPECS_FIELD_NUMBER = 8;
+    // repeated .IOSpecProto input_specs = 10;
+    public static final int INPUT_SPECS_FIELD_NUMBER = 10;
    private java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> inputSpecs_;
     /**
-     * <code>repeated .IOSpecProto input_specs = 8;</code>
+     * <code>repeated .IOSpecProto input_specs = 10;</code>
      */
    public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> getInputSpecsList() {
       return inputSpecs_;
     }
     /**
-     * <code>repeated .IOSpecProto input_specs = 8;</code>
+     * <code>repeated .IOSpecProto input_specs = 10;</code>
      */
    public java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> 
        getInputSpecsOrBuilderList() {
       return inputSpecs_;
     }
     /**
-     * <code>repeated .IOSpecProto input_specs = 8;</code>
+     * <code>repeated .IOSpecProto input_specs = 10;</code>
      */
     public int getInputSpecsCount() {
       return inputSpecs_.size();
     }
     /**
-     * <code>repeated .IOSpecProto input_specs = 8;</code>
+     * <code>repeated .IOSpecProto input_specs = 10;</code>
      */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getInputSpecs(int index) {
       return inputSpecs_.get(index);
     }
     /**
-     * <code>repeated .IOSpecProto input_specs = 8;</code>
+     * <code>repeated .IOSpecProto input_specs = 10;</code>
      */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getInputSpecsOrBuilder(
        int index) {
       return inputSpecs_.get(index);
     }
 
-    // repeated .IOSpecProto output_specs = 9;
-    public static final int OUTPUT_SPECS_FIELD_NUMBER = 9;
+    // repeated .IOSpecProto output_specs = 11;
+    public static final int OUTPUT_SPECS_FIELD_NUMBER = 11;
     private 
java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto>
 outputSpecs_;
     /**
-     * <code>repeated .IOSpecProto output_specs = 9;</code>
+     * <code>repeated .IOSpecProto output_specs = 11;</code>
      */
     public 
java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto>
 getOutputSpecsList() {
       return outputSpecs_;
     }
     /**
-     * <code>repeated .IOSpecProto output_specs = 9;</code>
+     * <code>repeated .IOSpecProto output_specs = 11;</code>
      */
     public java.util.List<? extends 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder>
 
         getOutputSpecsOrBuilderList() {
       return outputSpecs_;
     }
     /**
-     * <code>repeated .IOSpecProto output_specs = 9;</code>
+     * <code>repeated .IOSpecProto output_specs = 11;</code>
      */
     public int getOutputSpecsCount() {
       return outputSpecs_.size();
     }
     /**
-     * <code>repeated .IOSpecProto output_specs = 9;</code>
+     * <code>repeated .IOSpecProto output_specs = 11;</code>
      */
     public 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto 
getOutputSpecs(int index) {
       return outputSpecs_.get(index);
     }
     /**
-     * <code>repeated .IOSpecProto output_specs = 9;</code>
+     * <code>repeated .IOSpecProto output_specs = 11;</code>
      */
     public 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder
 getOutputSpecsOrBuilder(
         int index) {
       return outputSpecs_.get(index);
     }
 
-    // repeated .GroupInputSpecProto grouped_input_specs = 10;
-    public static final int GROUPED_INPUT_SPECS_FIELD_NUMBER = 10;
+    // repeated .GroupInputSpecProto grouped_input_specs = 12;
+    public static final int GROUPED_INPUT_SPECS_FIELD_NUMBER = 12;
     private 
java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto>
 groupedInputSpecs_;
     /**
-     * <code>repeated .GroupInputSpecProto grouped_input_specs = 10;</code>
+     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
      */
     public 
java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto>
 getGroupedInputSpecsList() {
       return groupedInputSpecs_;
     }
     /**
-     * <code>repeated .GroupInputSpecProto grouped_input_specs = 10;</code>
+     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
      */
     public java.util.List<? extends 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder>
 
         getGroupedInputSpecsOrBuilderList() {
       return groupedInputSpecs_;
     }
     /**
-     * <code>repeated .GroupInputSpecProto grouped_input_specs = 10;</code>
+     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
      */
     public int getGroupedInputSpecsCount() {
       return groupedInputSpecs_.size();
     }
     /**
-     * <code>repeated .GroupInputSpecProto grouped_input_specs = 10;</code>
+     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
      */
     public 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto
 getGroupedInputSpecs(int index) {
       return groupedInputSpecs_.get(index);
     }
     /**
-     * <code>repeated .GroupInputSpecProto grouped_input_specs = 10;</code>
+     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
      */
     public 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder
 getGroupedInputSpecsOrBuilder(
         int index) {
       return groupedInputSpecs_.get(index);
     }
 
-    // optional int32 vertex_parallelism = 11;
-    public static final int VERTEX_PARALLELISM_FIELD_NUMBER = 11;
+    // optional int32 vertex_parallelism = 13;
+    public static final int VERTEX_PARALLELISM_FIELD_NUMBER = 13;
     private int vertexParallelism_;
     /**
-     * <code>optional int32 vertex_parallelism = 11;</code>
+     * <code>optional int32 vertex_parallelism = 13;</code>
      *
      * <pre>
      * An internal field required for Tez.
      * </pre>
      */
     public boolean hasVertexParallelism() {
-      return ((bitField0_ & 0x00000080) == 0x00000080);
+      return ((bitField0_ & 0x00000200) == 0x00000200);
     }
     /**
-     * <code>optional int32 vertex_parallelism = 11;</code>
+     * <code>optional int32 vertex_parallelism = 13;</code>
      *
      * <pre>
      * An internal field required for Tez.
@@ -4784,9 +4099,11 @@ public final class LlapDaemonProtocolProtos {
     private void initFields() {
       user_ = "";
       signatureKeyId_ = 0L;
-      vertexIdentifier_ = 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance();
+      queryIdentifier_ = 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
+      hiveQueryId_ = "";
       dagName_ = "";
       vertexName_ = "";
+      vertexIndex_ = 0;
       tokenIdentifier_ = "";
       processorDescriptor_ = 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance();
       inputSpecs_ = java.util.Collections.emptyList();
@@ -4813,31 +4130,37 @@ public final class LlapDaemonProtocolProtos {
         output.writeInt64(2, signatureKeyId_);
       }
       if (((bitField0_ & 0x00000004) == 0x00000004)) {
-        output.writeMessage(3, vertexIdentifier_);
+        output.writeMessage(3, queryIdentifier_);
       }
       if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        output.writeBytes(4, getDagNameBytes());
+        output.writeBytes(4, getHiveQueryIdBytes());
       }
       if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        output.writeBytes(5, getVertexNameBytes());
+        output.writeBytes(5, getDagNameBytes());
       }
       if (((bitField0_ & 0x00000020) == 0x00000020)) {
-        output.writeBytes(6, getTokenIdentifierBytes());
+        output.writeBytes(6, getVertexNameBytes());
       }
       if (((bitField0_ & 0x00000040) == 0x00000040)) {
-        output.writeMessage(7, processorDescriptor_);
+        output.writeInt32(7, vertexIndex_);
+      }
+      if (((bitField0_ & 0x00000080) == 0x00000080)) {
+        output.writeBytes(8, getTokenIdentifierBytes());
+      }
+      if (((bitField0_ & 0x00000100) == 0x00000100)) {
+        output.writeMessage(9, processorDescriptor_);
       }
       for (int i = 0; i < inputSpecs_.size(); i++) {
-        output.writeMessage(8, inputSpecs_.get(i));
+        output.writeMessage(10, inputSpecs_.get(i));
       }
       for (int i = 0; i < outputSpecs_.size(); i++) {
-        output.writeMessage(9, outputSpecs_.get(i));
+        output.writeMessage(11, outputSpecs_.get(i));
       }
       for (int i = 0; i < groupedInputSpecs_.size(); i++) {
-        output.writeMessage(10, groupedInputSpecs_.get(i));
+        output.writeMessage(12, groupedInputSpecs_.get(i));
       }
-      if (((bitField0_ & 0x00000080) == 0x00000080)) {
-        output.writeInt32(11, vertexParallelism_);
+      if (((bitField0_ & 0x00000200) == 0x00000200)) {
+        output.writeInt32(13, vertexParallelism_);
       }
       getUnknownFields().writeTo(output);
     }
@@ -4858,39 +4181,47 @@ public final class LlapDaemonProtocolProtos {
       }
       if (((bitField0_ & 0x00000004) == 0x00000004)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(3, vertexIdentifier_);
+          .computeMessageSize(3, queryIdentifier_);
       }
       if (((bitField0_ & 0x00000008) == 0x00000008)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(4, getDagNameBytes());
+          .computeBytesSize(4, getHiveQueryIdBytes());
       }
       if (((bitField0_ & 0x00000010) == 0x00000010)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(5, getVertexNameBytes());
+          .computeBytesSize(5, getDagNameBytes());
       }
       if (((bitField0_ & 0x00000020) == 0x00000020)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(6, getTokenIdentifierBytes());
+          .computeBytesSize(6, getVertexNameBytes());
       }
       if (((bitField0_ & 0x00000040) == 0x00000040)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(7, processorDescriptor_);
+          .computeInt32Size(7, vertexIndex_);
+      }
+      if (((bitField0_ & 0x00000080) == 0x00000080)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(8, getTokenIdentifierBytes());
+      }
+      if (((bitField0_ & 0x00000100) == 0x00000100)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(9, processorDescriptor_);
       }
       for (int i = 0; i < inputSpecs_.size(); i++) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(8, inputSpecs_.get(i));
+          .computeMessageSize(10, inputSpecs_.get(i));
       }
       for (int i = 0; i < outputSpecs_.size(); i++) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(9, outputSpecs_.get(i));
+          .computeMessageSize(11, outputSpecs_.get(i));
       }
       for (int i = 0; i < groupedInputSpecs_.size(); i++) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(10, groupedInputSpecs_.get(i));
+          .computeMessageSize(12, groupedInputSpecs_.get(i));
       }
-      if (((bitField0_ & 0x00000080) == 0x00000080)) {
+      if (((bitField0_ & 0x00000200) == 0x00000200)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeInt32Size(11, vertexParallelism_);
+          .computeInt32Size(13, vertexParallelism_);
       }
       size += getUnknownFields().getSerializedSize();
       memoizedSerializedSize = size;
@@ -4925,10 +4256,15 @@ public final class LlapDaemonProtocolProtos {
         result = result && (getSignatureKeyId()
             == other.getSignatureKeyId());
       }
-      result = result && (hasVertexIdentifier() == 
other.hasVertexIdentifier());
-      if (hasVertexIdentifier()) {
-        result = result && getVertexIdentifier()
-            .equals(other.getVertexIdentifier());
+      result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
+      if (hasQueryIdentifier()) {
+        result = result && getQueryIdentifier()
+            .equals(other.getQueryIdentifier());
+      }
+      result = result && (hasHiveQueryId() == other.hasHiveQueryId());
+      if (hasHiveQueryId()) {
+        result = result && getHiveQueryId()
+            .equals(other.getHiveQueryId());
       }
       result = result && (hasDagName() == other.hasDagName());
       if (hasDagName()) {
@@ -4940,6 +4276,11 @@ public final class LlapDaemonProtocolProtos {
         result = result && getVertexName()
             .equals(other.getVertexName());
       }
+      result = result && (hasVertexIndex() == other.hasVertexIndex());
+      if (hasVertexIndex()) {
+        result = result && (getVertexIndex()
+            == other.getVertexIndex());
+      }
       result = result && (hasTokenIdentifier() == other.hasTokenIdentifier());
       if (hasTokenIdentifier()) {
         result = result && getTokenIdentifier()
@@ -4982,9 +4323,13 @@ public final class LlapDaemonProtocolProtos {
         hash = (37 * hash) + SIGNATUREKEYID_FIELD_NUMBER;
         hash = (53 * hash) + hashLong(getSignatureKeyId());
       }
-      if (hasVertexIdentifier()) {
-        hash = (37 * hash) + VERTEXIDENTIFIER_FIELD_NUMBER;
-        hash = (53 * hash) + getVertexIdentifier().hashCode();
+      if (hasQueryIdentifier()) {
+        hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
+        hash = (53 * hash) + getQueryIdentifier().hashCode();
+      }
+      if (hasHiveQueryId()) {
+        hash = (37 * hash) + HIVE_QUERY_ID_FIELD_NUMBER;
+        hash = (53 * hash) + getHiveQueryId().hashCode();
       }
       if (hasDagName()) {
         hash = (37 * hash) + DAG_NAME_FIELD_NUMBER;
@@ -4994,6 +4339,10 @@ public final class LlapDaemonProtocolProtos {
         hash = (37 * hash) + VERTEX_NAME_FIELD_NUMBER;
         hash = (53 * hash) + getVertexName().hashCode();
       }
+      if (hasVertexIndex()) {
+        hash = (37 * hash) + VERTEX_INDEX_FIELD_NUMBER;
+        hash = (53 * hash) + getVertexIndex();
+      }
       if (hasTokenIdentifier()) {
         hash = (37 * hash) + TOKEN_IDENTIFIER_FIELD_NUMBER;
         hash = (53 * hash) + getTokenIdentifier().hashCode();
@@ -5123,7 +4472,7 @@ public final class LlapDaemonProtocolProtos {
       }
       private void maybeForceBuilderInitialization() {
         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-          getVertexIdentifierFieldBuilder();
+          getQueryIdentifierFieldBuilder();
           getProcessorDescriptorFieldBuilder();
           getInputSpecsFieldBuilder();
           getOutputSpecsFieldBuilder();
@@ -5140,44 +4489,48 @@ public final class LlapDaemonProtocolProtos {
         bitField0_ = (bitField0_ & ~0x00000001);
         signatureKeyId_ = 0L;
         bitField0_ = (bitField0_ & ~0x00000002);
-        if (vertexIdentifierBuilder_ == null) {
-          vertexIdentifier_ = 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance();
+        if (queryIdentifierBuilder_ == null) {
+          queryIdentifier_ = 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
         } else {
-          vertexIdentifierBuilder_.clear();
+          queryIdentifierBuilder_.clear();
         }
         bitField0_ = (bitField0_ & ~0x00000004);
-        dagName_ = "";
+        hiveQueryId_ = "";
         bitField0_ = (bitField0_ & ~0x00000008);
-        vertexName_ = "";
+        dagName_ = "";
         bitField0_ = (bitField0_ & ~0x00000010);
-        tokenIdentifier_ = "";
+        vertexName_ = "";
         bitField0_ = (bitField0_ & ~0x00000020);
+        vertexIndex_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000040);
+        tokenIdentifier_ = "";
+        bitField0_ = (bitField0_ & ~0x00000080);
         if (processorDescriptorBuilder_ == null) {
           processorDescriptor_ = 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance();
         } else {
           processorDescriptorBuilder_.clear();
         }
-        bitField0_ = (bitField0_ & ~0x00000040);
+        bitField0_ = (bitField0_ & ~0x00000100);
         if (inputSpecsBuilder_ == null) {
           inputSpecs_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000080);
+          bitField0_ = (bitField0_ & ~0x00000200);
         } else {
           inputSpecsBuilder_.clear();
         }
         if (outputSpecsBuilder_ == null) {
           outputSpecs_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000100);
+          bitField0_ = (bitField0_ & ~0x00000400);
         } else {
           outputSpecsBuilder_.clear();
         }
         if (groupedInputSpecsBuilder_ == null) {
           groupedInputSpecs_ = java.util.Collections.emptyList();
-          bitField0_ = (bitField0_ & ~0x00000200);
+          bitField0_ = (bitField0_ & ~0x00000800);
         } else {
           groupedInputSpecsBuilder_.clear();
         }
         vertexParallelism_ = 0;
-        bitField0_ = (bitField0_ & ~0x00000400);
+        bitField0_ = (bitField0_ & ~0x00001000);
         return this;
       }
 
@@ -5217,60 +4570,68 @@ public final class LlapDaemonProtocolProtos {
         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
           to_bitField0_ |= 0x00000004;
         }
-        if (vertexIdentifierBuilder_ == null) {
-          result.vertexIdentifier_ = vertexIdentifier_;
+        if (queryIdentifierBuilder_ == null) {
+          result.queryIdentifier_ = queryIdentifier_;
         } else {
-          result.vertexIdentifier_ = vertexIdentifierBuilder_.build();
+          result.queryIdentifier_ = queryIdentifierBuilder_.build();
         }
         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
           to_bitField0_ |= 0x00000008;
         }
-        result.dagName_ = dagName_;
+        result.hiveQueryId_ = hiveQueryId_;
         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
           to_bitField0_ |= 0x00000010;
         }
-        result.vertexName_ = vertexName_;
+        result.dagName_ = dagName_;
         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
           to_bitField0_ |= 0x00000020;
         }
-        result.tokenIdentifier_ = tokenIdentifier_;
+        result.vertexName_ = vertexName_;
         if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
           to_bitField0_ |= 0x00000040;
         }
+        result.vertexIndex_ = vertexIndex_;
+        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
+          to_bitField0_ |= 0x00000080;
+        }
+        result.tokenIdentifier_ = tokenIdentifier_;
+        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
+          to_bitField0_ |= 0x00000100;
+        }
         if (processorDescriptorBuilder_ == null) {
           result.processorDescriptor_ = processorDescriptor_;
         } else {
           result.processorDescriptor_ = processorDescriptorBuilder_.build();
         }
         if (inputSpecsBuilder_ == null) {
-          if (((bitField0_ & 0x00000080) == 0x00000080)) {
+          if (((bitField0_ & 0x00000200) == 0x00000200)) {
             inputSpecs_ = java.util.Collections.unmodifiableList(inputSpecs_);
-            bitField0_ = (bitField0_ & ~0x00000080);
+            bitField0_ = (bitField0_ & ~0x00000200);
           }
           result.inputSpecs_ = inputSpecs_;
         } else {
           result.inputSpecs_ = inputSpecsBuilder_.build();
         }
         if (outputSpecsBuilder_ == null) {
-          if (((bitField0_ & 0x00000100) == 0x00000100)) {
+          if (((bitField0_ & 0x00000400) == 0x00000400)) {
             outputSpecs_ = 
java.util.Collections.unmodifiableList(outputSpecs_);
-            bitField0_ = (bitField0_ & ~0x00000100);
+            bitField0_ = (bitField0_ & ~0x00000400);
           }
           result.outputSpecs_ = outputSpecs_;
         } else {
           result.outputSpecs_ = outputSpecsBuilder_.build();
         }
         if (groupedInputSpecsBuilder_ == null) {
-          if (((bitField0_ & 0x00000200) == 0x00000200)) {
+          if (((bitField0_ & 0x00000800) == 0x00000800)) {
             groupedInputSpecs_ = 
java.util.Collections.unmodifiableList(groupedInputSpecs_);
-            bitField0_ = (bitField0_ & ~0x00000200);
+            bitField0_ = (bitField0_ & ~0x00000800);
           }
           result.groupedInputSpecs_ = groupedInputSpecs_;
         } else {
           result.groupedInputSpecs_ = groupedInputSpecsBuilder_.build();
         }
-        if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
-          to_bitField0_ |= 0x00000080;
+        if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
+          to_bitField0_ |= 0x00000200;
         }
         result.vertexParallelism_ = vertexParallelism_;
         result.bitField0_ = to_bitField0_;
@@ -5297,21 +4658,29 @@ public final class LlapDaemonProtocolProtos {
         if (other.hasSignatureKeyId()) {
           setSignatureKeyId(other.getSignatureKeyId());
         }
-        if (other.hasVertexIdentifier()) {
-          mergeVertexIdentifier(other.getVertexIdentifier());
+        if (other.hasQueryIdentifier()) {
+          mergeQueryIdentifier(other.getQueryIdentifier());
+        }
+        if (other.hasHiveQueryId()) {
+          bitField0_ |= 0x00000008;
+          hiveQueryId_ = other.hiveQueryId_;
+          onChanged();
         }
         if (other.hasDagName()) {
-          bitField0_ |= 0x00000008;
+          bitField0_ |= 0x00000010;
           dagName_ = other.dagName_;
           onChanged();
         }
         if (other.hasVertexName()) {
-          bitField0_ |= 0x00000010;
+          bitField0_ |= 0x00000020;
           vertexName_ = other.vertexName_;
           onChanged();
         }
+        if (other.hasVertexIndex()) {
+          setVertexIndex(other.getVertexIndex());
+        }
         if (other.hasTokenIdentifier()) {
-          bitField0_ |= 0x00000020;
+          bitField0_ |= 0x00000080;
           tokenIdentifier_ = other.tokenIdentifier_;
           onChanged();
         }
@@ -5322,7 +4691,7 @@ public final class LlapDaemonProtocolProtos {
           if (!other.inputSpecs_.isEmpty()) {
             if (inputSpecs_.isEmpty()) {
               inputSpecs_ = other.inputSpecs_;
-              bitField0_ = (bitField0_ & ~0x00000080);
+              bitField0_ = (bitField0_ & ~0x00000200);
             } else {
               ensureInputSpecsIsMutable();
               inputSpecs_.addAll(other.inputSpecs_);
@@ -5335,7 +4704,7 @@ public final class LlapDaemonProtocolProtos {
               inputSpecsBuilder_.dispose();
               inputSpecsBuilder_ = null;
               inputSpecs_ = other.inputSpecs_;
-              bitField0_ = (bitField0_ & ~0x00000080);
+              bitField0_ = (bitField0_ & ~0x00000200);
               inputSpecsBuilder_ = 
                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                    getInputSpecsFieldBuilder() : null;
@@ -5348,7 +4717,7 @@ public final class LlapDaemonProtocolProtos {
           if (!other.outputSpecs_.isEmpty()) {
             if (outputSpecs_.isEmpty()) {
               outputSpecs_ = other.outputSpecs_;
-              bitField0_ = (bitField0_ & ~0x00000100);
+              bitField0_ = (bitField0_ & ~0x00000400);
             } else {
               ensureOutputSpecsIsMutable();
               outputSpecs_.addAll(other.outputSpecs_);
@@ -5361,7 +4730,7 @@ public final class LlapDaemonProtocolProtos {
               outputSpecsBuilder_.dispose();
               outputSpecsBuilder_ = null;
               outputSpecs_ = other.outputSpecs_;
-              bitField0_ = (bitField0_ & ~0x00000100);
+              bitField0_ = (bitField0_ & ~0x00000400);
               outputSpecsBuilder_ = 
                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                    getOutputSpecsFieldBuilder() : null;
@@ -5374,7 +4743,7 @@ public final class LlapDaemonProtocolProtos {
           if (!other.groupedInputSpecs_.isEmpty()) {
             if (groupedInputSpecs_.isEmpty()) {
               groupedInputSpecs_ = other.groupedInputSpecs_;
-              bitField0_ = (bitField0_ & ~0x00000200);
+              bitField0_ = (bitField0_ & ~0x00000800);
             } else {
               ensureGroupedInputSpecsIsMutable();
               groupedInputSpecs_.addAll(other.groupedInputSpecs_);
@@ -5387,7 +4756,7 @@ public final class LlapDaemonProtocolProtos {
               groupedInputSpecsBuilder_.dispose();
               groupedInputSpecsBuilder_ = null;
               groupedInputSpecs_ = other.groupedInputSpecs_;
-              bitField0_ = (bitField0_ & ~0x00000200);
+              bitField0_ = (bitField0_ & ~0x00000800);
               groupedInputSpecsBuilder_ = 
                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                    getGroupedInputSpecsFieldBuilder() : null;
@@ -5533,137 +4902,211 @@ public final class LlapDaemonProtocolProtos {
         return this;
       }
 
-      // optional .VertexIdentifier vertexIdentifier = 3;
-      private 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier
 vertexIdentifier_ = 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance();
+      // optional .QueryIdentifierProto query_identifier = 3;
+      private 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto
 queryIdentifier_ = 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
-          
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier,
 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder,
 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifierOrBuilder>
 vertexIdentifierBuilder_;
+          
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto,
 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder,
 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>
 queryIdentifierBuilder_;
       /**
-       * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
        */
-      public boolean hasVertexIdentifier() {
+      public boolean hasQueryIdentifier() {
         return ((bitField0_ & 0x00000004) == 0x00000004);
       }
       /**
-       * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
        */
-      public 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier
 getVertexIdentifier() {
-        if (vertexIdentifierBuilder_ == null) {
-          return vertexIdentifier_;
+      public 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto
 getQueryIdentifier() {
+        if (queryIdentifierBuilder_ == null) {
+          return queryIdentifier_;
         } else {
-          return vertexIdentifierBuilder_.getMessage();
+          return queryIdentifierBuilder_.getMessage();
         }
       }
       /**
-       * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
        */
-      public Builder 
setVertexIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier
 value) {
-        if (vertexIdentifierBuilder_ == null) {
+      public Builder 
setQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto
 value) {
+        if (queryIdentifierBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
           }
-          vertexIdentifier_ = value;
+          queryIdentifier_ = value;
           onChanged();
         } else {
-          vertexIdentifierBuilder_.setMessage(value);
+          queryIdentifierBuilder_.setMessage(value);
         }
         bitField0_ |= 0x00000004;
         return this;
       }
       /**
-       * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
        */
-      public Builder setVertexIdentifier(
-          
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder
 builderForValue) {
-        if (vertexIdentifierBuilder_ == null) {
-          vertexIdentifier_ = builderForValue.build();
+      public Builder setQueryIdentifier(
+          
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder
 builderForValue) {
+        if (queryIdentifierBuilder_ == null) {
+          queryIdentifier_ = builderForValue.build();
           onChanged();
         } else {
-          vertexIdentifierBuilder_.setMessage(builderForValue.build());
+          queryIdentifierBuilder_.setMessage(builderForValue.build());
         }
         bitField0_ |= 0x00000004;
         return this;
       }
       /**
-       * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
        */
-      public Builder 
mergeVertexIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier
 value) {
-        if (vertexIdentifierBuilder_ == null) {
+      public Builder 
mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto
 value) {
+        if (queryIdentifierBuilder_ == null) {
           if (((bitField0_ & 0x00000004) == 0x00000004) &&
-              vertexIdentifier_ != 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance())
 {
-            vertexIdentifier_ =
-              
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.newBuilder(vertexIdentifier_).mergeFrom(value).buildPartial();
+              queryIdentifier_ != 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance())
 {
+            queryIdentifier_ =
+              
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
           } else {
-            vertexIdentifier_ = value;
+            queryIdentifier_ = value;
           }
           onChanged();
         } else {
-          vertexIdentifierBuilder_.mergeFrom(value);
+          queryIdentifierBuilder_.mergeFrom(value);
         }
         bitField0_ |= 0x00000004;
         return this;
       }
       /**
-       * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
        */
-      public Builder clearVertexIdentifier() {
-        if (vertexIdentifierBuilder_ == null) {
-          vertexIdentifier_ = 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.getDefaultInstance();
+      public Builder clearQueryIdentifier() {
+        if (queryIdentifierBuilder_ == null) {
+          queryIdentifier_ = 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
           onChanged();
         } else {
-          vertexIdentifierBuilder_.clear();
+          queryIdentifierBuilder_.clear();
         }
         bitField0_ = (bitField0_ & ~0x00000004);
         return this;
       }
       /**
-       * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
        */
-      public 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder
 getVertexIdentifierBuilder() {
+      public 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder
 getQueryIdentifierBuilder() {
         bitField0_ |= 0x00000004;
         onChanged();
-        return getVertexIdentifierFieldBuilder().getBuilder();
+        return getQueryIdentifierFieldBuilder().getBuilder();
       }
       /**
-       * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
        */
-      public 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifierOrBuilder
 getVertexIdentifierOrBuilder() {
-        if (vertexIdentifierBuilder_ != null) {
-          return vertexIdentifierBuilder_.getMessageOrBuilder();
+      public 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder
 getQueryIdentifierOrBuilder() {
+        if (queryIdentifierBuilder_ != null) {
+          return queryIdentifierBuilder_.getMessageOrBuilder();
         } else {
-          return vertexIdentifier_;
+          return queryIdentifier_;
         }
       }
       /**
-       * <code>optional .VertexIdentifier vertexIdentifier = 3;</code>
+       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
        */
       private com.google.protobuf.SingleFieldBuilder<
-          
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier,
 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifier.Builder,
 
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexIdentifierOrBuilder>
 
-          getVertexIdentifierFieldBuilder() {
-        if (vertexIdentifierBuilder_ == null) {
-          vertexIdentifierBuilder_ = new 
com.google.protobuf.SingleFieldBuilder<
-              org.apache.hadoop.hive.llap.daemon.rpc.L

<TRUNCATED>
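For reference, the renumbered field layout implied by the hunks above can be summarized as a .proto sketch. This is a reconstruction only: the enclosing message name is not visible in this excerpt and is assumed here, and field types are inferred from the generated accessors.

  // Sketch of the revised layout, reconstructed from the renumbered accessors above.
  // Message name assumed; not shown in this excerpt.
  message VertexSpecSketch {
    optional string user = 1;
    optional int64 signatureKeyId = 2;
    optional QueryIdentifierProto query_identifier = 3;      // replaces the removed vertexIdentifier
    optional string hive_query_id = 4;                        // new field
    optional string dag_name = 5;                             // was field 4
    optional string vertex_name = 6;                          // was field 5
    optional int32 vertex_index = 7;                          // new field
    optional string token_identifier = 8;                     // was field 6
    optional EntityDescriptorProto processor_descriptor = 9;  // was field 7
    repeated IOSpecProto input_specs = 10;                     // was field 8
    repeated IOSpecProto output_specs = 11;                    // was field 9
    repeated GroupInputSpecProto grouped_input_specs = 12;     // was field 10
    optional int32 vertex_parallelism = 13;                    // was field 11
  }

In short, query_identifier takes over tag 3 from the removed vertexIdentifier, hive_query_id and vertex_index are newly inserted, and every field from token_identifier onward shifts up by two tags, which accounts for the bitField0_ mask changes throughout the hunks.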
