Author: daijy
Date: Tue Sep  7 17:15:15 2010
New Revision: 993434

URL: http://svn.apache.org/viewvc?rev=993434&view=rev
Log:
PIG-1601: Make scalar work for secure hadoop

Modified:
    hadoop/pig/branches/branch-0.8/CHANGES.txt
    
hadoop/pig/branches/branch-0.8/src/org/apache/pig/impl/builtin/ReadScalars.java

Modified: hadoop/pig/branches/branch-0.8/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.8/CHANGES.txt?rev=993434&r1=993433&r2=993434&view=diff
==============================================================================
--- hadoop/pig/branches/branch-0.8/CHANGES.txt (original)
+++ hadoop/pig/branches/branch-0.8/CHANGES.txt Tue Sep  7 17:15:15 2010
@@ -190,6 +190,8 @@ PIG-1309: Map-side Cogroup (ashutoshc)
 
 BUG FIXES
 
+PIG-1601: Make scalar work for secure hadoop (daijy)
+
 PIG-1602: The .classpath of eclipse template still use hbase-0.20.0 (zjffdu)
 
 PIG-1596: NPE's thrown when attempting to load hbase columns containing null 
values (zjffdu)

Modified: 
hadoop/pig/branches/branch-0.8/src/org/apache/pig/impl/builtin/ReadScalars.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.8/src/org/apache/pig/impl/builtin/ReadScalars.java?rev=993434&r1=993433&r2=993434&view=diff
==============================================================================
--- 
hadoop/pig/branches/branch-0.8/src/org/apache/pig/impl/builtin/ReadScalars.java 
(original)
+++ 
hadoop/pig/branches/branch-0.8/src/org/apache/pig/impl/builtin/ReadScalars.java 
Tue Sep  7 17:15:15 2010
@@ -19,6 +19,7 @@ package org.apache.pig.impl.builtin;
 
 import java.io.IOException;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.pig.EvalFunc;
 import org.apache.pig.backend.executionengine.ExecException;
 import 
org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigMapReduce;
@@ -57,11 +58,15 @@ public class ReadScalars extends EvalFun
             try {
                 pos = DataType.toInteger(input.get(0));
                 scalarfilename = DataType.toString(input.get(1));
+                
+                // Hadoop security needs this property to be set
+                Configuration conf = UDFContext.getUDFContext().getJobConf();
+                if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
+                    conf.set("mapreduce.job.credentials.binary", 
+                            System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
+                }
                 loader = new ReadToEndLoader(
-                        new InterStorage(), 
-                        UDFContext.getUDFContext().getJobConf(),
-                        scalarfilename, 0
-                );
+                        new InterStorage(), conf, scalarfilename, 0);
             } catch (Exception e) {
                 throw new ExecException("Failed to open file '" + 
scalarfilename
                         + "'; error = " + e.getMessage());


Reply via email to