Author: rohini
Date: Sat Jul 15 20:38:16 2023
New Revision: 1911041

URL: http://svn.apache.org/viewvc?rev=1911041&view=rev
Log:
PIG-5440: Extra jars needed for hive3 (knoguchi via rohini)

Modified:
    pig/branches/branch-0.18/CHANGES.txt
    pig/branches/branch-0.18/build.xml
    pig/branches/branch-0.18/ivy.xml
    pig/branches/branch-0.18/shims/src/hive3/org/apache/pig/hive/HiveShims.java
    pig/branches/branch-0.18/test/org/apache/pig/test/TestLoaderStorerShipCacheFiles.java

Modified: pig/branches/branch-0.18/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.18/CHANGES.txt?rev=1911041&r1=1911040&r2=1911041&view=diff
==============================================================================
--- pig/branches/branch-0.18/CHANGES.txt (original)
+++ pig/branches/branch-0.18/CHANGES.txt Sat Jul 15 20:38:16 2023
@@ -122,6 +122,8 @@ OPTIMIZATIONS
  
 BUG FIXES
 
+PIG-5440: Extra jars needed for hive3 (knoguchi via rohini)
+
 PIG-5442: Add only credentials from setStoreLocation to the Job Conf (maswin via rohini)
 
 PIG-5441: Pig skew join tez grace reducer fails to find shuffle data (yigress via rohini)

Modified: pig/branches/branch-0.18/build.xml
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.18/build.xml?rev=1911041&r1=1911040&r2=1911041&view=diff
==============================================================================
--- pig/branches/branch-0.18/build.xml (original)
+++ pig/branches/branch-0.18/build.xml Sat Jul 15 20:38:16 2023
@@ -807,6 +807,8 @@
             <fileset dir="${ivy.lib.dir}" includes="hbase-hadoop2*.jar"/>
             <fileset dir="${ivy.lib.dir}" includes="tez-*.jar"/>
             <fileset dir="${ivy.lib.dir}" 
includes="commons-collections4-*.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="orc-*.jar"/>
+            <fileset dir="${ivy.lib.dir}" includes="aircompressor-*.jar"/>
         </copy>
         <copy file="${output.jarfile.core}" 
tofile="${output.jarfile.backcompat-core-h3}"/>
         <mkdir dir="${legacy.dir}" />

Modified: pig/branches/branch-0.18/ivy.xml
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.18/ivy.xml?rev=1911041&r1=1911040&r2=1911041&view=diff
==============================================================================
--- pig/branches/branch-0.18/ivy.xml (original)
+++ pig/branches/branch-0.18/ivy.xml Sat Jul 15 20:38:16 2023
@@ -592,8 +592,11 @@
                 conf="hive3->master" />
     <dependency org="org.apache.hive.shims" name="hive-shims-0.23" 
rev="${hive.version}" changing="true"
                 conf="hive3->master" />
+    <dependency org="org.apache.hive" name="hive-classification" 
rev="${hive.version}" changing="true"
+                conf="hive3->master" />
 
     <dependency org="org.apache.orc" name="orc-core" rev="${orc.version}" 
changing="true" conf="hive3->default" />
+    <dependency org="org.apache.orc" name="orc-shims" rev="${orc.version}" 
changing="true" conf="hive3->default" />
     <dependency org="org.apache.hive" name="hive-storage-api" 
rev="${hive-storage-api.version}" changing="true" conf="hive3->master" />
     <dependency org="org.iq80.snappy" name="snappy" rev="${snappy.version}"
       conf="test->master" />

Modified: pig/branches/branch-0.18/shims/src/hive3/org/apache/pig/hive/HiveShims.java
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.18/shims/src/hive3/org/apache/pig/hive/HiveShims.java?rev=1911041&r1=1911040&r2=1911041&view=diff
==============================================================================
--- pig/branches/branch-0.18/shims/src/hive3/org/apache/pig/hive/HiveShims.java (original)
+++ pig/branches/branch-0.18/shims/src/hive3/org/apache/pig/hive/HiveShims.java Sat Jul 15 20:38:16 2023
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.common.typ
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.llap.security.LlapSigner;
 import org.apache.hadoop.hive.ql.io.orc.CompressionKind;
 import org.apache.hadoop.hive.ql.io.orc.OrcFile;
 import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
@@ -46,9 +47,11 @@ import org.apache.hadoop.hive.shims.Shim
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.orc.OrcConf;
 import org.apache.orc.OrcFile.Version;
+import org.apache.orc.impl.HadoopShims;
 
 import com.esotericsoftware.kryo.Serializer;
 import com.esotericsoftware.kryo.io.Input;
+import io.airlift.compress.Decompressor;
 
 import org.joda.time.DateTime;
 
@@ -87,13 +90,15 @@ public class HiveShims {
         return new Class[]{OrcFile.class, HiveConf.class, AbstractSerDe.class,
                 org.apache.hadoop.hive.shims.HadoopShims.class, HadoopShimsSecure.class, DateWritable.class,
                 hadoopVersionShimsClass, Input.class, org.apache.orc.OrcFile.class,
-                com.esotericsoftware.minlog.Log.class};
+                com.esotericsoftware.minlog.Log.class, org.apache.orc.impl.HadoopShims.class,
+                io.airlift.compress.Decompressor.class};
     }
 
     public static Class[] getHiveUDFDependentClasses(Class hadoopVersionShimsClass) {
         return new Class[]{GenericUDF.class,
                 PrimitiveObjectInspector.class, HiveConf.class, Serializer.class, ShimLoader.class,
-                hadoopVersionShimsClass, HadoopShimsSecure.class, Collector.class, HiveDecimalWritable.class};
+                hadoopVersionShimsClass, HadoopShimsSecure.class, Collector.class, HiveDecimalWritable.class,
+                LlapSigner.class};
     }
 
     public static Object getSearchArgObjValue(Object value) {

Modified: pig/branches/branch-0.18/test/org/apache/pig/test/TestLoaderStorerShipCacheFiles.java
URL: http://svn.apache.org/viewvc/pig/branches/branch-0.18/test/org/apache/pig/test/TestLoaderStorerShipCacheFiles.java?rev=1911041&r1=1911040&r2=1911041&view=diff
==============================================================================
--- pig/branches/branch-0.18/test/org/apache/pig/test/TestLoaderStorerShipCacheFiles.java (original)
+++ pig/branches/branch-0.18/test/org/apache/pig/test/TestLoaderStorerShipCacheFiles.java Sat Jul 15 20:38:16 2023
@@ -50,15 +50,16 @@ public abstract class TestLoaderStorerSh
         if (hiveVersion.equals("3")) {
             String[] expectedJars = new String[] {"hive-common", "hive-exec", "hive-serde",
                     "hive-shims-0.23", "hive-shims-common", "orc-core",
-                    "hive-storage-api", "kryo", "minlog"
+                    "hive-storage-api", "kryo", "minlog",
+                    "orc-shims","aircompressor"
             };
 
-            checkPlan(pp, expectedJars, 9, pigServer.getPigContext());
+            checkPlan(pp, expectedJars, expectedJars.length, pigServer.getPigContext());
         } else {
             String[] expectedJars = new String[] {"hive-common", "hive-exec", "hive-serde",
                     "hive-shims-0.23", "hive-shims-common", "kryo"};
 
-            checkPlan(pp, expectedJars, 6, pigServer.getPigContext());
+            checkPlan(pp, expectedJars, expectedJars.length, pigServer.getPigContext());
         }
     }
 
@@ -72,16 +73,17 @@ public abstract class TestLoaderStorerSh
         if (hiveVersion.equals("3")) {
             String[] expectedJars = new String[] {"hive-common", "hive-exec", "hive-serde",
                     "hive-shims-0.23", "hive-shims-common", "orc-core",
-                    "hive-storage-api", "kryo", "minlog"
+                    "hive-storage-api", "kryo", "minlog",
+                    "orc-shims", "aircompressor"
             };
 
-            checkPlan(pp, expectedJars, 9, pigServer.getPigContext());
+            checkPlan(pp, expectedJars, expectedJars.length, pigServer.getPigContext());
         } else {
             String[] expectedJars = new String[] {"hive-common", "hive-exec", "hive-serde",
                     "hive-shims-0.23", "hive-shims-common", "kryo"};
 
 
-            checkPlan(pp, expectedJars, 6, pigServer.getPigContext());
+            checkPlan(pp, expectedJars, expectedJars.length, pigServer.getPigContext());
         }
     }
 

