This is an automated email from the ASF dual-hosted git repository.

anishek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new bdd34fc  HIVE-21651: Move protobuf serde into hive-exec.(Harish JP, reviewed by Anishek Agarwal)
bdd34fc is described below

commit bdd34fc4271fe35e84a6b73249d8161d567cf39e
Author: Anishek Agarwal <anis...@gmail.com>
AuthorDate: Mon Apr 29 10:53:03 2019 +0530

    HIVE-21651: Move protobuf serde into hive-exec.(Harish JP, reviewed by Anishek Agarwal)
---
 contrib/pom.xml                                    |  61 ---
 .../io/encoded/TestVectorDeserializeOrcWriter.java |   2 +-
 ql/pom.xml                                         |  23 +-
 .../hadoop/hive/ql/io/protobuf}/SampleProtos.java  | 504 ++++++++++-----------
 .../io/protobuf}/ProtobufBytesWritableSerDe.java   |   7 +-
 .../io/protobuf}/ProtobufMessageInputFormat.java   |   2 +-
 .../hive/ql/io/protobuf}/ProtobufMessageSerDe.java |   6 +-
 .../hadoop/hive/ql/io/protobuf}/ProtobufSerDe.java |   8 +-
 .../hadoop/hive/ql/io/protobuf}/package-info.java  |   2 +-
 ql/src/protobuf/{ => java}/HiveEvents.proto        |   0
 .../src/protobuf/test}/SampleProtos.proto          |   2 +-
 .../ql/io/protobuf}/TestProtoMessageSerDe.java     |  13 +-
 12 files changed, 298 insertions(+), 332 deletions(-)
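
For context, the substance of this change is a relocation: the protobuf serde classes and the generated test protos now live under org.apache.hadoop.hive.ql.io.protobuf in the ql module (hive-exec), and the generated test class SampleProtos moves out of org.apache.hadoop.hive.contrib.serde2 in contrib. A minimal, hypothetical sketch of a consumer of the relocated generated class after this commit (the class SampleProtosPackageCheck below is illustrative only and not part of this patch):

    // Hypothetical example; only the package and class names come from this diff.
    import org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry;

    public class SampleProtosPackageCheck {
      public static void main(String[] args) {
        // getDefaultInstance() is part of the generated protobuf 2.x API visible
        // in the diff; before this commit the same class was
        // org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.
        MapFieldEntry entry = MapFieldEntry.getDefaultInstance();
        System.out.println(entry.getClass().getName());
      }
    }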

diff --git a/contrib/pom.xml b/contrib/pom.xml
index d569645..ee2013f 100644
--- a/contrib/pom.xml
+++ b/contrib/pom.xml
@@ -49,11 +49,6 @@
       <artifactId>hive-shims</artifactId>
       <version>${project.version}</version>
     </dependency>
-    <dependency>
-      <groupId>com.google.protobuf</groupId>
-      <artifactId>protobuf-java</artifactId>
-      <version>${protobuf.version}</version>
-    </dependency>
     <!-- inter-project -->
     <dependency>
       <groupId>commons-codec</groupId>
@@ -85,61 +80,5 @@
   <build>
     <sourceDirectory>${basedir}/src/java</sourceDirectory>
     <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
-    <plugins>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>build-helper-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>add-test-sources</id>
-            <phase>generate-test-sources</phase>
-            <goals>
-              <goal>add-test-source</goal>
-            </goals>
-            <configuration>
-              <sources>
-                <source>src/gen-test/protobuf/gen-java</source>
-              </sources>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
   </build>
-
-  <profiles>
-    <profile>
-      <id>protobuf</id>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>generate-protobuf-test-sources</id>
-                <phase>generate-test-sources</phase>
-                <configuration>
-                  <target>
-                    <property name="protobuf.src.dir"  location="${basedir}/src/protobuf-test"/>
-                    <property name="protobuf.build.dir"  location="${basedir}/src/gen-test/protobuf/gen-java"/>
-                    <echo>Building contrib Protobuf</echo>
-                    <mkdir dir="${protobuf.build.dir}"/>
-                    <exec executable="protoc" failonerror="true">
-                      <arg value="--java_out=${protobuf.build.dir}"/>
-                      <arg value="-I=${protobuf.src.dir}"/>
-                      <arg value="${protobuf.src.dir}/SampleProtos.proto"/>
-                    </exec>
-                  </target>
-                </configuration>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-  </profiles>
 </project>
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/io/encoded/TestVectorDeserializeOrcWriter.java b/llap-server/src/test/org/apache/hadoop/hive/llap/io/encoded/TestVectorDeserializeOrcWriter.java
index ef7b1a3..647538e 100644
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/io/encoded/TestVectorDeserializeOrcWriter.java
+++ b/llap-server/src/test/org/apache/hadoop/hive/llap/io/encoded/TestVectorDeserializeOrcWriter.java
@@ -126,7 +126,7 @@ public class TestVectorDeserializeOrcWriter {
       }
 
       @Override
-      public void consumeData(Object data) throws InterruptedException {
+      public void consumeData(EncodedColumnBatch data) throws InterruptedException {
       }
     };
   }
diff --git a/ql/pom.xml b/ql/pom.xml
index 5790f51..7f51888 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -797,7 +797,7 @@
                 <phase>generate-sources</phase>
                 <configuration>
                   <target>
-                    <property name="protobuf.src.dir"  location="${basedir}/src/protobuf"/>
+                    <property name="protobuf.src.dir"  location="${basedir}/src/protobuf/java"/>
                     <property name="protobuf.build.dir"  location="${basedir}/src/gen/protobuf/gen-java"/>
                     <echo>Building ql Protobuf</echo>
                     <mkdir dir="${protobuf.build.dir}"/>
@@ -812,6 +812,26 @@
                   <goal>run</goal>
                 </goals>
               </execution>
+              <execution>
+                <id>generate-protobuf-test-sources</id>
+                <phase>generate-test-sources</phase>
+                <configuration>
+                  <target>
+                    <property name="protobuf.src.dir"  location="${basedir}/src/protobuf/test"/>
+                    <property name="protobuf.build.dir"  location="${basedir}/src/gen/protobuf/gen-test"/>
+                    <echo>Building ql test Protobuf</echo>
+                    <mkdir dir="${protobuf.build.dir}"/>
+                    <exec executable="protoc" failonerror="true">
+                      <arg value="--java_out=${protobuf.build.dir}"/>
+                      <arg value="-I=${protobuf.src.dir}"/>
+                      <arg value="${protobuf.src.dir}/SampleProtos.proto"/>
+                    </exec>
+                  </target>
+                </configuration>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+              </execution>
             </executions>
           </plugin>
         </plugins>
@@ -1007,6 +1027,7 @@
             <configuration>
              <sources>
                 <source>${project.build.directory}/generated-test-sources/java</source>
+                <source>src/gen/protobuf/gen-test</source>
               </sources>
             </configuration>
           </execution>
diff --git a/contrib/src/gen-test/protobuf/gen-java/org/apache/hadoop/hive/contrib/serde2/SampleProtos.java b/ql/src/gen/protobuf/gen-test/org/apache/hadoop/hive/ql/io/protobuf/SampleProtos.java
similarity index 87%
rename from contrib/src/gen-test/protobuf/gen-java/org/apache/hadoop/hive/contrib/serde2/SampleProtos.java
rename to ql/src/gen/protobuf/gen-test/org/apache/hadoop/hive/ql/io/protobuf/SampleProtos.java
index 8c20e22..ac75608 100644
--- a/contrib/src/gen-test/protobuf/gen-java/org/apache/hadoop/hive/contrib/serde2/SampleProtos.java
+++ b/ql/src/gen/protobuf/gen-test/org/apache/hadoop/hive/ql/io/protobuf/SampleProtos.java
@@ -1,7 +1,7 @@
 // Generated by the protocol buffer compiler.  DO NOT EDIT!
 // source: SampleProtos.proto
 
-package org.apache.hadoop.hive.contrib.serde2;
+package org.apache.hadoop.hive.ql.io.protobuf;
 
 public final class SampleProtos {
   private SampleProtos() {}
@@ -116,14 +116,14 @@ public final class SampleProtos {
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_descriptor;
+      return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_MapFieldEntry_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_fieldAccessorTable
+      return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_MapFieldEntry_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.class, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder.class);
+              
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.class, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder.class);
     }
 
     public static com.google.protobuf.Parser<MapFieldEntry> PARSER =
@@ -279,53 +279,53 @@ public final class SampleProtos {
       return super.writeReplace();
     }
 
-    public static 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+    public static 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+    public static 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
parseFrom(byte[] data)
+    public static 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+    public static 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
parseFrom(java.io.InputStream input)
+    public static 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return PARSER.parseFrom(input);
     }
-    public static 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+    public static 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return PARSER.parseFrom(input, extensionRegistry);
     }
-    public static 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
parseDelimitedFrom(java.io.InputStream input)
+    public static 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return PARSER.parseDelimitedFrom(input);
     }
-    public static 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
parseDelimitedFrom(
+    public static 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
-    public static 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+    public static 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return PARSER.parseFrom(input);
     }
-    public static 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry parseFrom(
+    public static 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -334,7 +334,7 @@ public final class SampleProtos {
 
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder 
newBuilder(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
prototype) {
+    public static Builder 
newBuilder(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
@@ -350,20 +350,20 @@ public final class SampleProtos {
      */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder {
+       implements 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_descriptor;
+        return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_MapFieldEntry_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_fieldAccessorTable
+        return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_MapFieldEntry_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.class, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder.class);
+                
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.class, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder.class);
       }
 
-      // Construct using 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.newBuilder()
+      // Construct using 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
@@ -396,23 +396,23 @@ public final class SampleProtos {
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_MapFieldEntry_descriptor;
+        return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_MapFieldEntry_descriptor;
       }
 
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
getDefaultInstanceForType() {
-        return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
getDefaultInstanceForType() {
+        return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance();
       }
 
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
build() {
-        org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
result = buildPartial();
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
build() {
+        org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
         return result;
       }
 
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
buildPartial() {
-        org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
result = new 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry(this);
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
buildPartial() {
+        org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
result = new 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry(this);
         int from_bitField0_ = bitField0_;
         int to_bitField0_ = 0;
         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -429,16 +429,16 @@ public final class SampleProtos {
       }
 
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry) {
-          return 
mergeFrom((org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry)other);
+        if (other instanceof 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry) {
+          return 
mergeFrom((org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder 
mergeFrom(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
other) {
-        if (other == 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance())
 return this;
+      public Builder 
mergeFrom(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
other) {
+        if (other == 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance())
 return this;
         if (other.hasKey()) {
           bitField0_ |= 0x00000001;
           key_ = other.key_;
@@ -461,11 +461,11 @@ public final class SampleProtos {
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
parsedMessage = null;
+        org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
parsedMessage = null;
         try {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = 
(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry) 
e.getUnfinishedMessage();
+          parsedMessage = 
(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry) 
e.getUnfinishedMessage();
           throw e;
         } finally {
           if (parsedMessage != null) {
@@ -642,12 +642,12 @@ public final class SampleProtos {
     /**
      * <code>repeated .MapFieldEntry anotherMap = 1;</code>
      */
-    
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
 
+    
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>
 
         getAnotherMapList();
     /**
      * <code>repeated .MapFieldEntry anotherMap = 1;</code>
      */
-    org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
getAnotherMap(int index);
+    org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
getAnotherMap(int index);
     /**
      * <code>repeated .MapFieldEntry anotherMap = 1;</code>
      */
@@ -655,12 +655,12 @@ public final class SampleProtos {
     /**
      * <code>repeated .MapFieldEntry anotherMap = 1;</code>
      */
-    java.util.List<? extends 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> 
+    java.util.List<? extends 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> 
         getAnotherMapOrBuilderList();
     /**
      * <code>repeated .MapFieldEntry anotherMap = 1;</code>
      */
-    org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder 
getAnotherMapOrBuilder(
+    org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder 
getAnotherMapOrBuilder(
         int index);
 
     // optional .MapFieldEntry noMap = 2;
@@ -671,11 +671,11 @@ public final class SampleProtos {
     /**
      * <code>optional .MapFieldEntry noMap = 2;</code>
      */
-    org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
getNoMap();
+    org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
getNoMap();
     /**
      * <code>optional .MapFieldEntry noMap = 2;</code>
      */
-    org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder 
getNoMapOrBuilder();
+    org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder 
getNoMapOrBuilder();
 
     // repeated int32 intList = 3;
     /**
@@ -744,18 +744,18 @@ public final class SampleProtos {
             }
             case 10: {
               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
-                anotherMap_ = new 
java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>();
+                anotherMap_ = new 
java.util.ArrayList<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>();
                 mutable_bitField0_ |= 0x00000001;
               }
-              
anotherMap_.add(input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.PARSER,
 extensionRegistry));
+              
anotherMap_.add(input.readMessage(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.PARSER,
 extensionRegistry));
               break;
             }
             case 18: {
-              
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
subBuilder = null;
+              
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
subBuilder = null;
               if (((bitField0_ & 0x00000001) == 0x00000001)) {
                 subBuilder = noMap_.toBuilder();
               }
-              noMap_ = 
input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.PARSER,
 extensionRegistry);
+              noMap_ = 
input.readMessage(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.PARSER,
 extensionRegistry);
               if (subBuilder != null) {
                 subBuilder.mergeFrom(noMap_);
                 noMap_ = subBuilder.buildPartial();
@@ -804,14 +804,14 @@ public final class SampleProtos {
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_descriptor;
+      return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_Mesg1_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_fieldAccessorTable
+      return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_Mesg1_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.class, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder.class);
+              org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.class, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder.class);
     }
 
     public static com.google.protobuf.Parser<Mesg1> PARSER =
@@ -832,17 +832,17 @@ public final class SampleProtos {
     private int bitField0_;
     // repeated .MapFieldEntry anotherMap = 1;
     public static final int ANOTHERMAP_FIELD_NUMBER = 1;
-    private 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
 anotherMap_;
+    private 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>
 anotherMap_;
     /**
      * <code>repeated .MapFieldEntry anotherMap = 1;</code>
      */
-    public 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
 getAnotherMapList() {
+    public 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>
 getAnotherMapList() {
       return anotherMap_;
     }
     /**
      * <code>repeated .MapFieldEntry anotherMap = 1;</code>
      */
-    public java.util.List<? extends 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> 
+    public java.util.List<? extends 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> 
         getAnotherMapOrBuilderList() {
       return anotherMap_;
     }
@@ -855,20 +855,20 @@ public final class SampleProtos {
     /**
      * <code>repeated .MapFieldEntry anotherMap = 1;</code>
      */
-    public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
getAnotherMap(int index) {
+    public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
getAnotherMap(int index) {
       return anotherMap_.get(index);
     }
     /**
      * <code>repeated .MapFieldEntry anotherMap = 1;</code>
      */
-    public 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder 
getAnotherMapOrBuilder(
+    public 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder 
getAnotherMapOrBuilder(
         int index) {
       return anotherMap_.get(index);
     }
 
     // optional .MapFieldEntry noMap = 2;
     public static final int NOMAP_FIELD_NUMBER = 2;
-    private org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
noMap_;
+    private org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
noMap_;
     /**
      * <code>optional .MapFieldEntry noMap = 2;</code>
      */
@@ -878,13 +878,13 @@ public final class SampleProtos {
     /**
      * <code>optional .MapFieldEntry noMap = 2;</code>
      */
-    public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
getNoMap() {
+    public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
getNoMap() {
       return noMap_;
     }
     /**
      * <code>optional .MapFieldEntry noMap = 2;</code>
      */
-    public 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder 
getNoMapOrBuilder() {
+    public 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder 
getNoMapOrBuilder() {
       return noMap_;
     }
 
@@ -913,7 +913,7 @@ public final class SampleProtos {
 
     private void initFields() {
       anotherMap_ = java.util.Collections.emptyList();
-      noMap_ = 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+      noMap_ = 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance();
       intList_ = java.util.Collections.emptyList();
     }
     private byte memoizedIsInitialized = -1;
@@ -975,53 +975,53 @@ public final class SampleProtos {
       return super.writeReplace();
     }
 
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
parseFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
parseFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
parseFrom(byte[] data)
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
parseFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
parseFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return PARSER.parseFrom(input);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
parseFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return PARSER.parseFrom(input, extensionRegistry);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
parseDelimitedFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return PARSER.parseDelimitedFrom(input);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
parseDelimitedFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
parseFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return PARSER.parseFrom(input);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
parseFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -1030,7 +1030,7 @@ public final class SampleProtos {
 
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder 
newBuilder(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 prototype) {
+    public static Builder 
newBuilder(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
@@ -1046,20 +1046,20 @@ public final class SampleProtos {
      */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder {
+       implements 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_descriptor;
+        return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_Mesg1_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_fieldAccessorTable
+        return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_Mesg1_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.class, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder.class);
+                
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.class, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder.class);
       }
 
-      // Construct using 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.newBuilder()
+      // Construct using 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
@@ -1088,7 +1088,7 @@ public final class SampleProtos {
           anotherMapBuilder_.clear();
         }
         if (noMapBuilder_ == null) {
-          noMap_ = 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+          noMap_ = 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance();
         } else {
           noMapBuilder_.clear();
         }
@@ -1104,23 +1104,23 @@ public final class SampleProtos {
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_Mesg1_descriptor;
+        return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_Mesg1_descriptor;
       }
 
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
getDefaultInstanceForType() {
-        return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
getDefaultInstanceForType() {
+        return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance();
       }
 
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 build() {
-        org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 result = 
buildPartial();
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 build() {
+        org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 result = 
buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
         return result;
       }
 
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
buildPartial() {
-        org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 result = new 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1(this);
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
buildPartial() {
+        org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 result = new 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1(this);
         int from_bitField0_ = bitField0_;
         int to_bitField0_ = 0;
         if (anotherMapBuilder_ == null) {
@@ -1151,16 +1151,16 @@ public final class SampleProtos {
       }
 
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1) {
-          return 
mergeFrom((org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1)other);
+        if (other instanceof 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1) {
+          return 
mergeFrom((org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder 
mergeFrom(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 other) {
-        if (other == 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance()) 
return this;
+      public Builder 
mergeFrom(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 other) {
+        if (other == 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance()) 
return this;
         if (anotherMapBuilder_ == null) {
           if (!other.anotherMap_.isEmpty()) {
             if (anotherMap_.isEmpty()) {
@@ -1212,11 +1212,11 @@ public final class SampleProtos {
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 parsedMessage 
= null;
+        org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 parsedMessage 
= null;
         try {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = 
(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1) 
e.getUnfinishedMessage();
+          parsedMessage = 
(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1) 
e.getUnfinishedMessage();
           throw e;
         } finally {
           if (parsedMessage != null) {
@@ -1228,22 +1228,22 @@ public final class SampleProtos {
       private int bitField0_;
 
       // repeated .MapFieldEntry anotherMap = 1;
-      private 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
 anotherMap_ =
+      private 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>
 anotherMap_ =
         java.util.Collections.emptyList();
       private void ensureAnotherMapIsMutable() {
         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
-          anotherMap_ = new 
java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>(anotherMap_);
+          anotherMap_ = new 
java.util.ArrayList<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>(anotherMap_);
           bitField0_ |= 0x00000001;
          }
       }
 
       private com.google.protobuf.RepeatedFieldBuilder<
-          org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> 
anotherMapBuilder_;
+          org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> 
anotherMapBuilder_;
 
       /**
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
-      public 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
 getAnotherMapList() {
+      public 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>
 getAnotherMapList() {
         if (anotherMapBuilder_ == null) {
           return java.util.Collections.unmodifiableList(anotherMap_);
         } else {
@@ -1263,7 +1263,7 @@ public final class SampleProtos {
       /**
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
getAnotherMap(int index) {
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
getAnotherMap(int index) {
         if (anotherMapBuilder_ == null) {
           return anotherMap_.get(index);
         } else {
@@ -1274,7 +1274,7 @@ public final class SampleProtos {
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
       public Builder setAnotherMap(
-          int index, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+          int index, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry value) {
         if (anotherMapBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -1291,7 +1291,7 @@ public final class SampleProtos {
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
       public Builder setAnotherMap(
-          int index, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
+          int index, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
         if (anotherMapBuilder_ == null) {
           ensureAnotherMapIsMutable();
           anotherMap_.set(index, builderForValue.build());
@@ -1304,7 +1304,7 @@ public final class SampleProtos {
       /**
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
-      public Builder 
addAnotherMap(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
value) {
+      public Builder 
addAnotherMap(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
value) {
         if (anotherMapBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -1321,7 +1321,7 @@ public final class SampleProtos {
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
       public Builder addAnotherMap(
-          int index, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+          int index, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry value) {
         if (anotherMapBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -1338,7 +1338,7 @@ public final class SampleProtos {
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
       public Builder addAnotherMap(
-          
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
+          
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
         if (anotherMapBuilder_ == null) {
           ensureAnotherMapIsMutable();
           anotherMap_.add(builderForValue.build());
@@ -1352,7 +1352,7 @@ public final class SampleProtos {
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
       public Builder addAnotherMap(
-          int index, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
+          int index, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
         if (anotherMapBuilder_ == null) {
           ensureAnotherMapIsMutable();
           anotherMap_.add(index, builderForValue.build());
@@ -1366,7 +1366,7 @@ public final class SampleProtos {
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
       public Builder addAllAnotherMap(
-          java.lang.Iterable<? extends 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> values) {
+          java.lang.Iterable<? extends 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry> values) {
         if (anotherMapBuilder_ == null) {
           ensureAnotherMapIsMutable();
           super.addAll(values, anotherMap_);
@@ -1405,14 +1405,14 @@ public final class SampleProtos {
       /**
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
-      public 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
getAnotherMapBuilder(
+      public 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
getAnotherMapBuilder(
           int index) {
         return getAnotherMapFieldBuilder().getBuilder(index);
       }
       /**
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
-      public 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder 
getAnotherMapOrBuilder(
+      public 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder 
getAnotherMapOrBuilder(
           int index) {
         if (anotherMapBuilder_ == null) {
           return anotherMap_.get(index);  } else {
@@ -1422,7 +1422,7 @@ public final class SampleProtos {
       /**
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
-      public java.util.List<? extends 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> 
+      public java.util.List<? extends 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> 
            getAnotherMapOrBuilderList() {
         if (anotherMapBuilder_ != null) {
           return anotherMapBuilder_.getMessageOrBuilderList();
@@ -1433,31 +1433,31 @@ public final class SampleProtos {
       /**
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
-      public 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
addAnotherMapBuilder() {
+      public 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
addAnotherMapBuilder() {
         return getAnotherMapFieldBuilder().addBuilder(
-            
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance());
+            
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance());
       }
       /**
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
-      public 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
addAnotherMapBuilder(
+      public 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
addAnotherMapBuilder(
           int index) {
         return getAnotherMapFieldBuilder().addBuilder(
-            index, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance());
+            index, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance());
       }
       /**
        * <code>repeated .MapFieldEntry anotherMap = 1;</code>
        */
-      public 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder>
 
+      public 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder>
 
            getAnotherMapBuilderList() {
         return getAnotherMapFieldBuilder().getBuilderList();
       }
       private com.google.protobuf.RepeatedFieldBuilder<
-          org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> 
+          org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> 
           getAnotherMapFieldBuilder() {
         if (anotherMapBuilder_ == null) {
           anotherMapBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
-              
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>(
+              
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>(
                   anotherMap_,
                   ((bitField0_ & 0x00000001) == 0x00000001),
                   getParentForChildren(),
@@ -1468,9 +1468,9 @@ public final class SampleProtos {
       }
 
       // optional .MapFieldEntry noMap = 2;
-      private org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
noMap_ = 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+      private org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
noMap_ = 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> 
noMapBuilder_;
+          org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> 
noMapBuilder_;
       /**
        * <code>optional .MapFieldEntry noMap = 2;</code>
        */
@@ -1480,7 +1480,7 @@ public final class SampleProtos {
       /**
        * <code>optional .MapFieldEntry noMap = 2;</code>
        */
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
getNoMap() {
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
getNoMap() {
         if (noMapBuilder_ == null) {
           return noMap_;
         } else {
@@ -1490,7 +1490,7 @@ public final class SampleProtos {
       /**
        * <code>optional .MapFieldEntry noMap = 2;</code>
        */
-      public Builder 
setNoMap(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
value) {
+      public Builder 
setNoMap(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
value) {
         if (noMapBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -1507,7 +1507,7 @@ public final class SampleProtos {
        * <code>optional .MapFieldEntry noMap = 2;</code>
        */
       public Builder setNoMap(
-          
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
+          
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
         if (noMapBuilder_ == null) {
           noMap_ = builderForValue.build();
           onChanged();
@@ -1520,12 +1520,12 @@ public final class SampleProtos {
       /**
        * <code>optional .MapFieldEntry noMap = 2;</code>
        */
-      public Builder 
mergeNoMap(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
value) {
+      public Builder 
mergeNoMap(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
value) {
         if (noMapBuilder_ == null) {
           if (((bitField0_ & 0x00000002) == 0x00000002) &&
-              noMap_ != 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance())
 {
+              noMap_ != 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance())
 {
             noMap_ =
-              
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.newBuilder(noMap_).mergeFrom(value).buildPartial();
+              
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.newBuilder(noMap_).mergeFrom(value).buildPartial();
           } else {
             noMap_ = value;
           }
@@ -1541,7 +1541,7 @@ public final class SampleProtos {
        */
       public Builder clearNoMap() {
         if (noMapBuilder_ == null) {
-          noMap_ = 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance();
+          noMap_ = 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance();
           onChanged();
         } else {
           noMapBuilder_.clear();
@@ -1552,7 +1552,7 @@ public final class SampleProtos {
       /**
        * <code>optional .MapFieldEntry noMap = 2;</code>
        */
-      public 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
getNoMapBuilder() {
+      public 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
getNoMapBuilder() {
         bitField0_ |= 0x00000002;
         onChanged();
         return getNoMapFieldBuilder().getBuilder();
@@ -1560,7 +1560,7 @@ public final class SampleProtos {
       /**
        * <code>optional .MapFieldEntry noMap = 2;</code>
        */
-      public 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder 
getNoMapOrBuilder() {
+      public 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder 
getNoMapOrBuilder() {
         if (noMapBuilder_ != null) {
           return noMapBuilder_.getMessageOrBuilder();
         } else {
@@ -1571,11 +1571,11 @@ public final class SampleProtos {
        * <code>optional .MapFieldEntry noMap = 2;</code>
        */
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> 
+          org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> 
           getNoMapFieldBuilder() {
         if (noMapBuilder_ == null) {
           noMapBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>(
+              
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>(
                   noMap_,
                   getParentForChildren(),
                   isClean());
@@ -1823,12 +1823,12 @@ public final class SampleProtos {
     /**
      * <code>repeated .MapFieldEntry mapType = 16;</code>
      */
-    
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
 
+    
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>
 
         getMapTypeList();
     /**
      * <code>repeated .MapFieldEntry mapType = 16;</code>
      */
-    org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
getMapType(int index);
+    org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
getMapType(int index);
     /**
      * <code>repeated .MapFieldEntry mapType = 16;</code>
      */
@@ -1836,12 +1836,12 @@ public final class SampleProtos {
     /**
      * <code>repeated .MapFieldEntry mapType = 16;</code>
      */
-    java.util.List<? extends 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> 
+    java.util.List<? extends 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> 
         getMapTypeOrBuilderList();
     /**
      * <code>repeated .MapFieldEntry mapType = 16;</code>
      */
-    org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder 
getMapTypeOrBuilder(
+    org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder 
getMapTypeOrBuilder(
         int index);
 
     // repeated string stringListType = 17;
@@ -1872,22 +1872,22 @@ public final class SampleProtos {
     /**
      * <code>optional .Mesg1 messageType = 18;</code>
      */
-    org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 getMessageType();
+    org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 getMessageType();
     /**
      * <code>optional .Mesg1 messageType = 18;</code>
      */
-    org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder 
getMessageTypeOrBuilder();
+    org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder 
getMessageTypeOrBuilder();
 
     // repeated .Mesg1 messageListType = 19;
     /**
      * <code>repeated .Mesg1 messageListType = 19;</code>
      */
-    java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> 
+    java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1> 
         getMessageListTypeList();
     /**
      * <code>repeated .Mesg1 messageListType = 19;</code>
      */
-    org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
getMessageListType(int index);
+    org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
getMessageListType(int index);
     /**
      * <code>repeated .Mesg1 messageListType = 19;</code>
      */
@@ -1895,12 +1895,12 @@ public final class SampleProtos {
     /**
      * <code>repeated .Mesg1 messageListType = 19;</code>
      */
-    java.util.List<? extends 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder> 
+    java.util.List<? extends 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder> 
         getMessageListTypeOrBuilderList();
     /**
      * <code>repeated .Mesg1 messageListType = 19;</code>
      */
-    org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder 
getMessageListTypeOrBuilder(
+    org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder 
getMessageListTypeOrBuilder(
         int index);
 
     // optional .AllTypes.Enum1 enumType = 20;
@@ -1911,7 +1911,7 @@ public final class SampleProtos {
     /**
      * <code>optional .AllTypes.Enum1 enumType = 20;</code>
      */
-    org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 
getEnumType();
+    org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 
getEnumType();
   }
   /**
    * Protobuf type {@code AllTypes}
@@ -2041,10 +2041,10 @@ public final class SampleProtos {
             }
             case 130: {
               if (!((mutable_bitField0_ & 0x00008000) == 0x00008000)) {
-                mapType_ = new 
java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>();
+                mapType_ = new 
java.util.ArrayList<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>();
                 mutable_bitField0_ |= 0x00008000;
               }
-              
mapType_.add(input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.PARSER,
 extensionRegistry));
+              
mapType_.add(input.readMessage(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.PARSER,
 extensionRegistry));
               break;
             }
             case 138: {
@@ -2056,11 +2056,11 @@ public final class SampleProtos {
               break;
             }
             case 146: {
-              org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder 
subBuilder = null;
+              org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder 
subBuilder = null;
               if (((bitField0_ & 0x00008000) == 0x00008000)) {
                 subBuilder = messageType_.toBuilder();
               }
-              messageType_ = 
input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.PARSER,
 extensionRegistry);
+              messageType_ = 
input.readMessage(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.PARSER,
 extensionRegistry);
               if (subBuilder != null) {
                 subBuilder.mergeFrom(messageType_);
                 messageType_ = subBuilder.buildPartial();
@@ -2070,15 +2070,15 @@ public final class SampleProtos {
             }
             case 154: {
               if (!((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
-                messageListType_ = new 
java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1>();
+                messageListType_ = new 
java.util.ArrayList<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1>();
                 mutable_bitField0_ |= 0x00040000;
               }
-              
messageListType_.add(input.readMessage(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.PARSER,
 extensionRegistry));
+              
messageListType_.add(input.readMessage(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.PARSER,
 extensionRegistry));
               break;
             }
             case 160: {
               int rawValue = input.readEnum();
-              
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 value = 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.valueOf(rawValue);
+              
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 value = 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1.valueOf(rawValue);
               if (value == null) {
                 unknownFields.mergeVarintField(20, rawValue);
               } else {
@@ -2110,14 +2110,14 @@ public final class SampleProtos {
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_descriptor;
+      return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_AllTypes_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_fieldAccessorTable
+      return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_AllTypes_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.class, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Builder.class);
+              
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.class, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Builder.class);
     }
 
     public static com.google.protobuf.Parser<AllTypes> PARSER =
@@ -2192,7 +2192,7 @@ public final class SampleProtos {
       }
       public static final com.google.protobuf.Descriptors.EnumDescriptor
           getDescriptor() {
-        return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.getDescriptor().getEnumTypes().get(0);
+        return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.getDescriptor().getEnumTypes().get(0);
       }
 
       private static final Enum1[] VALUES = values();
@@ -2487,17 +2487,17 @@ public final class SampleProtos {
 
     // repeated .MapFieldEntry mapType = 16;
     public static final int MAPTYPE_FIELD_NUMBER = 16;
-    private 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
 mapType_;
+    private 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>
 mapType_;
     /**
      * <code>repeated .MapFieldEntry mapType = 16;</code>
      */
-    public 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
 getMapTypeList() {
+    public 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>
 getMapTypeList() {
       return mapType_;
     }
     /**
      * <code>repeated .MapFieldEntry mapType = 16;</code>
      */
-    public java.util.List<? extends 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> 
+    public java.util.List<? extends 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> 
         getMapTypeOrBuilderList() {
       return mapType_;
     }
@@ -2510,13 +2510,13 @@ public final class SampleProtos {
     /**
      * <code>repeated .MapFieldEntry mapType = 16;</code>
      */
-    public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
getMapType(int index) {
+    public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
getMapType(int index) {
       return mapType_.get(index);
     }
     /**
      * <code>repeated .MapFieldEntry mapType = 16;</code>
      */
-    public 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder 
getMapTypeOrBuilder(
+    public 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder 
getMapTypeOrBuilder(
         int index) {
       return mapType_.get(index);
     }
@@ -2553,7 +2553,7 @@ public final class SampleProtos {
 
     // optional .Mesg1 messageType = 18;
     public static final int MESSAGETYPE_FIELD_NUMBER = 18;
-    private org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
messageType_;
+    private org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
messageType_;
     /**
      * <code>optional .Mesg1 messageType = 18;</code>
      */
@@ -2563,29 +2563,29 @@ public final class SampleProtos {
     /**
      * <code>optional .Mesg1 messageType = 18;</code>
      */
-    public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
getMessageType() {
+    public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
getMessageType() {
       return messageType_;
     }
     /**
      * <code>optional .Mesg1 messageType = 18;</code>
      */
-    public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder 
getMessageTypeOrBuilder() {
+    public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder 
getMessageTypeOrBuilder() {
       return messageType_;
     }
 
     // repeated .Mesg1 messageListType = 19;
     public static final int MESSAGELISTTYPE_FIELD_NUMBER = 19;
-    private 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> 
messageListType_;
+    private 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1> 
messageListType_;
     /**
      * <code>repeated .Mesg1 messageListType = 19;</code>
      */
-    public 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> 
getMessageListTypeList() {
+    public 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1> 
getMessageListTypeList() {
       return messageListType_;
     }
     /**
      * <code>repeated .Mesg1 messageListType = 19;</code>
      */
-    public java.util.List<? extends 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder> 
+    public java.util.List<? extends 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder> 
         getMessageListTypeOrBuilderList() {
       return messageListType_;
     }
@@ -2598,20 +2598,20 @@ public final class SampleProtos {
     /**
      * <code>repeated .Mesg1 messageListType = 19;</code>
      */
-    public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
getMessageListType(int index) {
+    public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
getMessageListType(int index) {
       return messageListType_.get(index);
     }
     /**
      * <code>repeated .Mesg1 messageListType = 19;</code>
      */
-    public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder 
getMessageListTypeOrBuilder(
+    public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder 
getMessageListTypeOrBuilder(
         int index) {
       return messageListType_.get(index);
     }
 
     // optional .AllTypes.Enum1 enumType = 20;
     public static final int ENUMTYPE_FIELD_NUMBER = 20;
-    private org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 
enumType_;
+    private org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 
enumType_;
     /**
      * <code>optional .AllTypes.Enum1 enumType = 20;</code>
      */
@@ -2621,7 +2621,7 @@ public final class SampleProtos {
     /**
      * <code>optional .AllTypes.Enum1 enumType = 20;</code>
      */
-    public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 
getEnumType() {
+    public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 
getEnumType() {
       return enumType_;
     }
 
@@ -2643,9 +2643,9 @@ public final class SampleProtos {
       bytesType_ = com.google.protobuf.ByteString.EMPTY;
       mapType_ = java.util.Collections.emptyList();
       stringListType_ = com.google.protobuf.LazyStringArrayList.EMPTY;
-      messageType_ = 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+      messageType_ = 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance();
       messageListType_ = java.util.Collections.emptyList();
-      enumType_ = 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.VAL1;
+      enumType_ = 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1.VAL1;
     }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
@@ -2825,53 +2825,53 @@ public final class SampleProtos {
       return super.writeReplace();
     }
 
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
parseFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
parseFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
parseFrom(byte[] data)
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
parseFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
parseFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return PARSER.parseFrom(input);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
parseFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return PARSER.parseFrom(input, extensionRegistry);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
parseDelimitedFrom(java.io.InputStream input)
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return PARSER.parseDelimitedFrom(input);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
parseDelimitedFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return PARSER.parseDelimitedFrom(input, extensionRegistry);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
parseFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return PARSER.parseFrom(input);
     }
-    public static org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
parseFrom(
+    public static org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -2880,7 +2880,7 @@ public final class SampleProtos {
 
     public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder 
newBuilder(org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
prototype) {
+    public static Builder 
newBuilder(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
prototype) {
       return newBuilder().mergeFrom(prototype);
     }
     public Builder toBuilder() { return newBuilder(this); }
@@ -2896,20 +2896,20 @@ public final class SampleProtos {
      */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypesOrBuilder {
+       implements 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypesOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_descriptor;
+        return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_AllTypes_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_fieldAccessorTable
+        return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_AllTypes_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.class, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Builder.class);
+                
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.class, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Builder.class);
       }
 
-      // Construct using 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.newBuilder()
+      // Construct using 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
@@ -2971,7 +2971,7 @@ public final class SampleProtos {
         stringListType_ = com.google.protobuf.LazyStringArrayList.EMPTY;
         bitField0_ = (bitField0_ & ~0x00010000);
         if (messageTypeBuilder_ == null) {
-          messageType_ = 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+          messageType_ = 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance();
         } else {
           messageTypeBuilder_.clear();
         }
@@ -2982,7 +2982,7 @@ public final class SampleProtos {
         } else {
           messageListTypeBuilder_.clear();
         }
-        enumType_ = 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.VAL1;
+        enumType_ = 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1.VAL1;
         bitField0_ = (bitField0_ & ~0x00080000);
         return this;
       }
@@ -2993,23 +2993,23 @@ public final class SampleProtos {
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.internal_static_AllTypes_descriptor;
+        return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.internal_static_AllTypes_descriptor;
       }
 
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
getDefaultInstanceForType() {
-        return 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.getDefaultInstance();
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
getDefaultInstanceForType() {
+        return 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.getDefaultInstance();
       }
 
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
build() {
-        org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes result = 
buildPartial();
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
build() {
+        org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes result = 
buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
         return result;
       }
 
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
buildPartial() {
-        org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes result = 
new org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes(this);
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
buildPartial() {
+        org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes result = 
new org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes(this);
         int from_bitField0_ = bitField0_;
         int to_bitField0_ = 0;
         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@@ -3114,16 +3114,16 @@ public final class SampleProtos {
       }
 
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes) {
-          return 
mergeFrom((org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes)other);
+        if (other instanceof 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes) {
+          return 
mergeFrom((org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder 
mergeFrom(org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes other) {
-        if (other == 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.getDefaultInstance())
 return this;
+      public Builder 
mergeFrom(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes other) {
+        if (other == 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.getDefaultInstance())
 return this;
         if (other.hasDoubleType()) {
           setDoubleType(other.getDoubleType());
         }
@@ -3251,11 +3251,11 @@ public final class SampleProtos {
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes 
parsedMessage = null;
+        org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes 
parsedMessage = null;
         try {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = 
(org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes) 
e.getUnfinishedMessage();
+          parsedMessage = 
(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes) 
e.getUnfinishedMessage();
           throw e;
         } finally {
           if (parsedMessage != null) {
@@ -3806,22 +3806,22 @@ public final class SampleProtos {
       }
 
       // repeated .MapFieldEntry mapType = 16;
-      private 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
 mapType_ =
+      private 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>
 mapType_ =
         java.util.Collections.emptyList();
       private void ensureMapTypeIsMutable() {
         if (!((bitField0_ & 0x00008000) == 0x00008000)) {
-          mapType_ = new 
java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>(mapType_);
+          mapType_ = new 
java.util.ArrayList<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>(mapType_);
           bitField0_ |= 0x00008000;
          }
       }
 
       private com.google.protobuf.RepeatedFieldBuilder<
-          org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> 
mapTypeBuilder_;
+          org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> 
mapTypeBuilder_;
 
       /**
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
-      public 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry>
 getMapTypeList() {
+      public 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry>
 getMapTypeList() {
         if (mapTypeBuilder_ == null) {
           return java.util.Collections.unmodifiableList(mapType_);
         } else {
@@ -3841,7 +3841,7 @@ public final class SampleProtos {
       /**
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
getMapType(int index) {
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
getMapType(int index) {
         if (mapTypeBuilder_ == null) {
           return mapType_.get(index);
         } else {
@@ -3852,7 +3852,7 @@ public final class SampleProtos {
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
       public Builder setMapType(
-          int index, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+          int index, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry value) {
         if (mapTypeBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -3869,7 +3869,7 @@ public final class SampleProtos {
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
       public Builder setMapType(
-          int index, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
+          int index, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
         if (mapTypeBuilder_ == null) {
           ensureMapTypeIsMutable();
           mapType_.set(index, builderForValue.build());
@@ -3882,7 +3882,7 @@ public final class SampleProtos {
       /**
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
-      public Builder 
addMapType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry 
value) {
+      public Builder 
addMapType(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry 
value) {
         if (mapTypeBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -3899,7 +3899,7 @@ public final class SampleProtos {
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
       public Builder addMapType(
-          int index, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry value) {
+          int index, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry value) {
         if (mapTypeBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -3916,7 +3916,7 @@ public final class SampleProtos {
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
       public Builder addMapType(
-          
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
+          
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
         if (mapTypeBuilder_ == null) {
           ensureMapTypeIsMutable();
           mapType_.add(builderForValue.build());
@@ -3930,7 +3930,7 @@ public final class SampleProtos {
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
       public Builder addMapType(
-          int index, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
+          int index, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
builderForValue) {
         if (mapTypeBuilder_ == null) {
           ensureMapTypeIsMutable();
           mapType_.add(index, builderForValue.build());
@@ -3944,7 +3944,7 @@ public final class SampleProtos {
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
       public Builder addAllMapType(
-          java.lang.Iterable<? extends 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry> values) {
+          java.lang.Iterable<? extends 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry> values) {
         if (mapTypeBuilder_ == null) {
           ensureMapTypeIsMutable();
           super.addAll(values, mapType_);
@@ -3983,14 +3983,14 @@ public final class SampleProtos {
       /**
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
-      public 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
getMapTypeBuilder(
+      public 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
getMapTypeBuilder(
           int index) {
         return getMapTypeFieldBuilder().getBuilder(index);
       }
       /**
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
-      public 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder 
getMapTypeOrBuilder(
+      public 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder 
getMapTypeOrBuilder(
           int index) {
         if (mapTypeBuilder_ == null) {
           return mapType_.get(index);  } else {
@@ -4000,7 +4000,7 @@ public final class SampleProtos {
       /**
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
-      public java.util.List<? extends 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> 
+      public java.util.List<? extends 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> 
            getMapTypeOrBuilderList() {
         if (mapTypeBuilder_ != null) {
           return mapTypeBuilder_.getMessageOrBuilderList();
@@ -4011,31 +4011,31 @@ public final class SampleProtos {
       /**
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
-      public 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
addMapTypeBuilder() {
+      public 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
addMapTypeBuilder() {
         return getMapTypeFieldBuilder().addBuilder(
-            
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance());
+            
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance());
       }
       /**
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
-      public 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder 
addMapTypeBuilder(
+      public 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder 
addMapTypeBuilder(
           int index) {
         return getMapTypeFieldBuilder().addBuilder(
-            index, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.getDefaultInstance());
+            index, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.getDefaultInstance());
       }
       /**
        * <code>repeated .MapFieldEntry mapType = 16;</code>
        */
-      public 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder>
 
+      public 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder>
 
            getMapTypeBuilderList() {
         return getMapTypeFieldBuilder().getBuilderList();
       }
       private com.google.protobuf.RepeatedFieldBuilder<
-          org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder> 
+          org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder> 
           getMapTypeFieldBuilder() {
         if (mapTypeBuilder_ == null) {
           mapTypeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
-              
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntryOrBuilder>(
+              
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntryOrBuilder>(
                   mapType_,
                   ((bitField0_ & 0x00008000) == 0x00008000),
                   getParentForChildren(),
@@ -4139,9 +4139,9 @@ public final class SampleProtos {
       }
 
       // optional .Mesg1 messageType = 18;
-      private org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
messageType_ = 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+      private org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
messageType_ = 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder> 
messageTypeBuilder_;
+          org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder> 
messageTypeBuilder_;
       /**
        * <code>optional .Mesg1 messageType = 18;</code>
        */
@@ -4151,7 +4151,7 @@ public final class SampleProtos {
       /**
        * <code>optional .Mesg1 messageType = 18;</code>
        */
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
getMessageType() {
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
getMessageType() {
         if (messageTypeBuilder_ == null) {
           return messageType_;
         } else {
@@ -4161,7 +4161,7 @@ public final class SampleProtos {
       /**
        * <code>optional .Mesg1 messageType = 18;</code>
        */
-      public Builder 
setMessageType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 value) {
+      public Builder 
setMessageType(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 value) {
         if (messageTypeBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -4178,7 +4178,7 @@ public final class SampleProtos {
        * <code>optional .Mesg1 messageType = 18;</code>
        */
       public Builder setMessageType(
-          org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder 
builderForValue) {
+          org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder 
builderForValue) {
         if (messageTypeBuilder_ == null) {
           messageType_ = builderForValue.build();
           onChanged();
@@ -4191,12 +4191,12 @@ public final class SampleProtos {
       /**
        * <code>optional .Mesg1 messageType = 18;</code>
        */
-      public Builder 
mergeMessageType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
value) {
+      public Builder 
mergeMessageType(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
value) {
         if (messageTypeBuilder_ == null) {
           if (((bitField0_ & 0x00020000) == 0x00020000) &&
-              messageType_ != 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance()) {
+              messageType_ != 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance()) {
             messageType_ =
-              
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.newBuilder(messageType_).mergeFrom(value).buildPartial();
+              
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.newBuilder(messageType_).mergeFrom(value).buildPartial();
           } else {
             messageType_ = value;
           }
@@ -4212,7 +4212,7 @@ public final class SampleProtos {
        */
       public Builder clearMessageType() {
         if (messageTypeBuilder_ == null) {
-          messageType_ = 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance();
+          messageType_ = 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance();
           onChanged();
         } else {
           messageTypeBuilder_.clear();
@@ -4223,7 +4223,7 @@ public final class SampleProtos {
       /**
        * <code>optional .Mesg1 messageType = 18;</code>
        */
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder 
getMessageTypeBuilder() {
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder 
getMessageTypeBuilder() {
         bitField0_ |= 0x00020000;
         onChanged();
         return getMessageTypeFieldBuilder().getBuilder();
@@ -4231,7 +4231,7 @@ public final class SampleProtos {
       /**
        * <code>optional .Mesg1 messageType = 18;</code>
        */
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder 
getMessageTypeOrBuilder() {
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder 
getMessageTypeOrBuilder() {
         if (messageTypeBuilder_ != null) {
           return messageTypeBuilder_.getMessageOrBuilder();
         } else {
@@ -4242,11 +4242,11 @@ public final class SampleProtos {
        * <code>optional .Mesg1 messageType = 18;</code>
        */
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder> 
+          org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder> 
           getMessageTypeFieldBuilder() {
         if (messageTypeBuilder_ == null) {
           messageTypeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>(
+              org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder>(
                   messageType_,
                   getParentForChildren(),
                   isClean());
@@ -4256,22 +4256,22 @@ public final class SampleProtos {
       }
 
       // repeated .Mesg1 messageListType = 19;
-      private 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> 
messageListType_ =
+      private 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1> 
messageListType_ =
         java.util.Collections.emptyList();
       private void ensureMessageListTypeIsMutable() {
         if (!((bitField0_ & 0x00040000) == 0x00040000)) {
-          messageListType_ = new 
java.util.ArrayList<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1>(messageListType_);
+          messageListType_ = new 
java.util.ArrayList<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1>(messageListType_);
           bitField0_ |= 0x00040000;
          }
       }
 
       private com.google.protobuf.RepeatedFieldBuilder<
-          org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder> 
messageListTypeBuilder_;
+          org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder> 
messageListTypeBuilder_;
 
       /**
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
-      public 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> 
getMessageListTypeList() {
+      public 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1> 
getMessageListTypeList() {
         if (messageListTypeBuilder_ == null) {
           return java.util.Collections.unmodifiableList(messageListType_);
         } else {
@@ -4291,7 +4291,7 @@ public final class SampleProtos {
       /**
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
getMessageListType(int index) {
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
getMessageListType(int index) {
         if (messageListTypeBuilder_ == null) {
           return messageListType_.get(index);
         } else {
@@ -4302,7 +4302,7 @@ public final class SampleProtos {
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
       public Builder setMessageListType(
-          int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
value) {
+          int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
value) {
         if (messageListTypeBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -4319,7 +4319,7 @@ public final class SampleProtos {
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
       public Builder setMessageListType(
-          int index, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder 
builderForValue) {
+          int index, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder 
builderForValue) {
         if (messageListTypeBuilder_ == null) {
           ensureMessageListTypeIsMutable();
           messageListType_.set(index, builderForValue.build());
@@ -4332,7 +4332,7 @@ public final class SampleProtos {
       /**
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
-      public Builder 
addMessageListType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
value) {
+      public Builder 
addMessageListType(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
value) {
         if (messageListTypeBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -4349,7 +4349,7 @@ public final class SampleProtos {
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
       public Builder addMessageListType(
-          int index, org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1 
value) {
+          int index, org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1 
value) {
         if (messageListTypeBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -4366,7 +4366,7 @@ public final class SampleProtos {
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
       public Builder addMessageListType(
-          org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder 
builderForValue) {
+          org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder 
builderForValue) {
         if (messageListTypeBuilder_ == null) {
           ensureMessageListTypeIsMutable();
           messageListType_.add(builderForValue.build());
@@ -4380,7 +4380,7 @@ public final class SampleProtos {
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
       public Builder addMessageListType(
-          int index, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder 
builderForValue) {
+          int index, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder 
builderForValue) {
         if (messageListTypeBuilder_ == null) {
           ensureMessageListTypeIsMutable();
           messageListType_.add(index, builderForValue.build());
@@ -4394,7 +4394,7 @@ public final class SampleProtos {
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
       public Builder addAllMessageListType(
-          java.lang.Iterable<? extends 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1> values) {
+          java.lang.Iterable<? extends 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1> values) {
         if (messageListTypeBuilder_ == null) {
           ensureMessageListTypeIsMutable();
           super.addAll(values, messageListType_);
@@ -4433,14 +4433,14 @@ public final class SampleProtos {
       /**
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder 
getMessageListTypeBuilder(
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder 
getMessageListTypeBuilder(
           int index) {
         return getMessageListTypeFieldBuilder().getBuilder(index);
       }
       /**
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder 
getMessageListTypeOrBuilder(
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder 
getMessageListTypeOrBuilder(
           int index) {
         if (messageListTypeBuilder_ == null) {
           return messageListType_.get(index);  } else {
@@ -4450,7 +4450,7 @@ public final class SampleProtos {
       /**
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
-      public java.util.List<? extends 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder> 
+      public java.util.List<? extends 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder> 
            getMessageListTypeOrBuilderList() {
         if (messageListTypeBuilder_ != null) {
           return messageListTypeBuilder_.getMessageOrBuilderList();
@@ -4461,31 +4461,31 @@ public final class SampleProtos {
       /**
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder 
addMessageListTypeBuilder() {
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder 
addMessageListTypeBuilder() {
         return getMessageListTypeFieldBuilder().addBuilder(
-            
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance());
+            
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance());
       }
       /**
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder 
addMessageListTypeBuilder(
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder 
addMessageListTypeBuilder(
           int index) {
         return getMessageListTypeFieldBuilder().addBuilder(
-            index, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.getDefaultInstance());
+            index, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.getDefaultInstance());
       }
       /**
        * <code>repeated .Mesg1 messageListType = 19;</code>
        */
-      public 
java.util.List<org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder>
 
+      public 
java.util.List<org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder>
 
            getMessageListTypeBuilderList() {
         return getMessageListTypeFieldBuilder().getBuilderList();
       }
       private com.google.protobuf.RepeatedFieldBuilder<
-          org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder> 
+          org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder> 
           getMessageListTypeFieldBuilder() {
         if (messageListTypeBuilder_ == null) {
           messageListTypeBuilder_ = new 
com.google.protobuf.RepeatedFieldBuilder<
-              org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1.Builder, 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1OrBuilder>(
+              org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1.Builder, 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1OrBuilder>(
                   messageListType_,
                   ((bitField0_ & 0x00040000) == 0x00040000),
                   getParentForChildren(),
@@ -4496,7 +4496,7 @@ public final class SampleProtos {
       }
 
       // optional .AllTypes.Enum1 enumType = 20;
-      private 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 enumType_ = 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.VAL1;
+      private 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 enumType_ = 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1.VAL1;
       /**
        * <code>optional .AllTypes.Enum1 enumType = 20;</code>
        */
@@ -4506,13 +4506,13 @@ public final class SampleProtos {
       /**
        * <code>optional .AllTypes.Enum1 enumType = 20;</code>
        */
-      public org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 
getEnumType() {
+      public org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 
getEnumType() {
         return enumType_;
       }
       /**
        * <code>optional .AllTypes.Enum1 enumType = 20;</code>
        */
-      public Builder 
setEnumType(org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1 
value) {
+      public Builder 
setEnumType(org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1 
value) {
         if (value == null) {
           throw new NullPointerException();
         }
@@ -4526,7 +4526,7 @@ public final class SampleProtos {
        */
       public Builder clearEnumType() {
         bitField0_ = (bitField0_ & ~0x00080000);
-        enumType_ = 
org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1.VAL1;
+        enumType_ = 
org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1.VAL1;
         onChanged();
         return this;
       }
@@ -4583,7 +4583,7 @@ public final class SampleProtos {
       "\017messageListType\030\023 \003(\0132\006.Mesg1\022!\n\010enumTy" +
       "pe\030\024 
\001(\0162\017.AllTypes.Enum1\"\033\n\005Enum1\022\010\n\004VA" +
       "L1\020\001\022\010\n\004VAL2\020\002B5\n%org.apache.hadoop.hive" +
-      ".contrib.serde2B\014SampleProtos"
+      ".ql.io.protobufB\014SampleProtos"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner 
assigner =
       new 
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
diff --git 
a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufBytesWritableSerDe.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufBytesWritableSerDe.java
similarity index 93%
rename from 
contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufBytesWritableSerDe.java
rename to 
ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufBytesWritableSerDe.java
index d6c18ee..9b4af72 100644
--- 
a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufBytesWritableSerDe.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufBytesWritableSerDe.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.contrib.serde2;
+package org.apache.hadoop.hive.ql.io.protobuf;
 
 import java.util.Properties;
 
@@ -59,4 +59,9 @@ public class ProtobufBytesWritableSerDe extends ProtobufSerDe 
{
       throw new SerDeException("Unable to parse proto message", e);
     }
   }
+
+  @Override
+  public Class<? extends Writable> getSerializedClass() {
+    return BytesWritable.class;
+  }
 }
diff --git 
a/contrib/src/java/org/apache/hadoop/hive/contrib/input/ProtobufMessageInputFormat.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufMessageInputFormat.java
similarity index 99%
rename from 
contrib/src/java/org/apache/hadoop/hive/contrib/input/ProtobufMessageInputFormat.java
rename to 
ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufMessageInputFormat.java
index 45c7b5c..a563968 100644
--- 
a/contrib/src/java/org/apache/hadoop/hive/contrib/input/ProtobufMessageInputFormat.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufMessageInputFormat.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.contrib.input;
+package org.apache.hadoop.hive.ql.io.protobuf;
 
 import java.io.EOFException;
 import java.io.IOException;
diff --git 
a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufMessageSerDe.java
 b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufMessageSerDe.java
similarity index 88%
rename from 
contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufMessageSerDe.java
rename to 
ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufMessageSerDe.java
index d584f78..6fce553 100644
--- 
a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufMessageSerDe.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufMessageSerDe.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.contrib.serde2;
+package org.apache.hadoop.hive.ql.io.protobuf;
 
 import org.apache.hadoop.io.Writable;
 import org.apache.tez.dag.history.logging.proto.ProtoMessageWritable;
@@ -34,4 +34,8 @@ public class ProtobufMessageSerDe extends ProtobufSerDe {
     return ((ProtoMessageWritable<Message>)writable).getMessage();
   }
 
+  @Override
+  public Class<? extends Writable> getSerializedClass() {
+    return ProtoMessageWritable.class;
+  }
 }
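
    With this change each concrete SerDe reports its own writable type:
    ProtobufBytesWritableSerDe returns BytesWritable.class and
    ProtobufMessageSerDe returns ProtoMessageWritable.class, while the shared
    override is dropped from the abstract ProtobufSerDe in the hunk further
    below. A minimal sketch of the resulting behaviour, assuming the default
    no-arg constructors; the SerializedClassCheck class name is illustrative
    only and not part of this patch:

        import org.apache.hadoop.hive.ql.io.protobuf.ProtobufBytesWritableSerDe;
        import org.apache.hadoop.hive.ql.io.protobuf.ProtobufMessageSerDe;

        public class SerializedClassCheck {
          public static void main(String[] args) {
            // The BytesWritable-backed SerDe now advertises BytesWritable.
            System.out.println(new ProtobufBytesWritableSerDe().getSerializedClass());
            // The proto-message SerDe advertises ProtoMessageWritable.
            System.out.println(new ProtobufMessageSerDe().getSerializedClass());
          }
        }
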
diff --git 
a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufSerDe.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufSerDe.java
similarity index 98%
rename from 
contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufSerDe.java
rename to ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufSerDe.java
index 0b7f721..86da30f 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/ProtobufSerDe.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/ProtobufSerDe.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.contrib.serde2;
+package org.apache.hadoop.hive.ql.io.protobuf;
 
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
@@ -36,7 +36,6 @@ import 
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import 
org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Writable;
-import org.apache.tez.dag.history.logging.proto.ProtoMessageWritable;
 
 import com.google.common.collect.Sets;
 import com.google.protobuf.ByteString;
@@ -111,11 +110,6 @@ public abstract class ProtobufSerDe extends AbstractSerDe {
   }
 
   @Override
-  public Class<? extends Writable> getSerializedClass() {
-    return ProtoMessageWritable.class;
-  }
-
-  @Override
   public Writable serialize(Object obj, ObjectInspector objInspector) throws 
SerDeException {
     throw new UnsupportedOperationException("Not implemented serialize");
   }
diff --git 
a/contrib/src/java/org/apache/hadoop/hive/contrib/input/package-info.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/package-info.java
similarity index 94%
rename from 
contrib/src/java/org/apache/hadoop/hive/contrib/input/package-info.java
rename to ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/package-info.java
index e70d245..b6d4209 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/input/package-info.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/protobuf/package-info.java
@@ -19,5 +19,5 @@
 /**
  * This package contains contributed input format.
  */
-package org.apache.hadoop.hive.contrib.input;
+package org.apache.hadoop.hive.ql.io.protobuf;
 
diff --git a/ql/src/protobuf/HiveEvents.proto 
b/ql/src/protobuf/java/HiveEvents.proto
similarity index 100%
rename from ql/src/protobuf/HiveEvents.proto
rename to ql/src/protobuf/java/HiveEvents.proto
diff --git a/contrib/src/protobuf-test/SampleProtos.proto 
b/ql/src/protobuf/test/SampleProtos.proto
similarity index 96%
rename from contrib/src/protobuf-test/SampleProtos.proto
rename to ql/src/protobuf/test/SampleProtos.proto
index c7d0453..139bb7d 100644
--- a/contrib/src/protobuf-test/SampleProtos.proto
+++ b/ql/src/protobuf/test/SampleProtos.proto
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-option java_package = "org.apache.hadoop.hive.contrib.serde2";
+option java_package = "org.apache.hadoop.hive.ql.io.protobuf";
 option java_outer_classname = "SampleProtos";
 
 message MapFieldEntry {
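
    Because the java_package option now points at
    org.apache.hadoop.hive.ql.io.protobuf, the regenerated SampleProtos classes
    shown earlier in this patch are addressed from the new package. A minimal
    round-trip sketch against that generated API, assuming the regenerated
    classes are on the classpath; the RoundTrip class name is illustrative
    only and not part of this patch:

        import org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes;
        import org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1;

        public class RoundTrip {
          public static void main(String[] args) throws Exception {
            // Build a sample message through the generated builder API.
            AllTypes msg = AllTypes.newBuilder()
                .setEnumType(Enum1.VAL1)
                .build();

            // Serialize and parse back with the parser from the new package.
            AllTypes parsed = AllTypes.parseFrom(msg.toByteArray());
            System.out.println(parsed.getEnumType());
          }
        }
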
diff --git 
a/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestProtoMessageSerDe.java
 b/ql/src/test/org/apache/hadoop/hive/ql/io/protobuf/TestProtoMessageSerDe.java
similarity index 95%
rename from 
contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestProtoMessageSerDe.java
rename to 
ql/src/test/org/apache/hadoop/hive/ql/io/protobuf/TestProtoMessageSerDe.java
index 4f31f10..490836c 100644
--- 
a/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestProtoMessageSerDe.java
+++ 
b/ql/src/test/org/apache/hadoop/hive/ql/io/protobuf/TestProtoMessageSerDe.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.contrib.serde2;
+package org.apache.hadoop.hive.ql.io.protobuf;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
@@ -30,10 +30,13 @@ import java.util.Map;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes;
-import org.apache.hadoop.hive.contrib.serde2.SampleProtos.AllTypes.Enum1;
-import org.apache.hadoop.hive.contrib.serde2.SampleProtos.MapFieldEntry;
-import org.apache.hadoop.hive.contrib.serde2.SampleProtos.Mesg1;
+import org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes;
+import org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.AllTypes.Enum1;
+import org.apache.hadoop.hive.ql.io.protobuf.ProtobufBytesWritableSerDe;
+import org.apache.hadoop.hive.ql.io.protobuf.ProtobufMessageSerDe;
+import org.apache.hadoop.hive.ql.io.protobuf.ProtobufSerDe;
+import org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.MapFieldEntry;
+import org.apache.hadoop.hive.ql.io.protobuf.SampleProtos.Mesg1;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
