Author: sms
Date: Tue Mar 10 23:15:58 2009
New Revision: 752301

URL: http://svn.apache.org/viewvc?rev=752301&view=rev
Log:
PIG-650: pig should look for and use the pig specific 
'pig-cluster-hadoop-site.xml' in the non HOD case just like it does in the HOD 
case
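
For context, a minimal sketch (not part of the patch) of what the non-HOD path now does, mirroring the HOD path: create a JobConf, add pig-cluster-hadoop-site.xml as a classpath resource, and read settings from it. The class name and the property key below are illustrative only.

    // Hedged sketch, assuming hadoop and pig-cluster-hadoop-site.xml are on the classpath.
    import org.apache.hadoop.mapred.JobConf;

    public class PigClusterSiteSketch {
        public static void main(String[] args) {
            // hadoop-default.xml and hadoop-site.xml are loaded first;
            // resources added afterwards override the earlier ones.
            JobConf jobConf = new JobConf();
            jobConf.addResource("pig-cluster-hadoop-site.xml");

            // "mapred.job.tracker" is just an example key; any property set in
            // pig-cluster-hadoop-site.xml would now be visible here.
            System.out.println(jobConf.get("mapred.job.tracker"));
        }
    }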

Modified:
    hadoop/pig/trunk/CHANGES.txt
    hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java

Modified: hadoop/pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/CHANGES.txt?rev=752301&r1=752300&r2=752301&view=diff
==============================================================================
--- hadoop/pig/trunk/CHANGES.txt (original)
+++ hadoop/pig/trunk/CHANGES.txt Tue Mar 10 23:15:58 2009
@@ -457,3 +457,7 @@
 
     PIG-705: Pig should display a better error message when backend error
     messages cannot be parsed (sms)
+
+    PIG-650: pig should look for and use the pig specific
+    'pig-cluster-hadoop-site.xml' in the non HOD case just like it does in the
+    HOD case (sms)

Modified: hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java
URL: http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java?rev=752301&r1=752300&r2=752301&view=diff
==============================================================================
--- hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/HExecutionEngine.java Tue Mar 10 23:15:58 2009
@@ -161,6 +161,15 @@
             // 2. hadoop-site.xml: Site-specific configuration for a given hadoop installation.
             // Now add the settings from "properties" object to override any existing properties
             // All of the above is accomplished in the method call below
+           
+            JobConf jobConf = new JobConf();
+            jobConf.addResource("pig-cluster-hadoop-site.xml");
+            
+            //the method below alters the properties object by overriding the
+            //hadoop properties with the values from properties and recomputing
+            //the properties
+            recomputeProperties(jobConf, properties);
+            
             configuration = ConfigurationUtil.toConfiguration(properties);
             properties = ConfigurationUtil.toProperties(configuration);
             cluster = properties.getProperty(JOB_TRACKER_LOCATION);
@@ -382,36 +391,11 @@
             JobConf jobConf = new JobConf(hadoopConf);
             jobConf.addResource("pig-cluster-hadoop-site.xml");
 
-                       // We need to load the properties from the hadoop configuration
-                       // file we just found in the hod dir.  We want to override
-                       // these with any existing properties we have.
-               if (jobConf != null) {
-                   Properties hodProperties = new Properties();
-               Iterator<Map.Entry<String, String>> iter = jobConf.iterator();
-               while (iter.hasNext()) {
-                       Map.Entry<String, String> entry = iter.next();
-                       hodProperties.put(entry.getKey(), entry.getValue());
-               }
-
-               //override hod properties with user defined properties
-               Enumeration<Object> propertiesIter = properties.keys();
-                while (propertiesIter.hasMoreElements()) {
-                    String key = (String) propertiesIter.nextElement();
-                    String val = properties.getProperty(key);
-                    hodProperties.put(key, val);
-                }
-                
-                //clear user defined properties and re-populate
-                properties.clear();
-                Enumeration<Object> hodPropertiesIter = hodProperties.keys();
-                while (hodPropertiesIter.hasMoreElements()) {
-                    String key = (String) hodPropertiesIter.nextElement();
-                    String val = hodProperties.getProperty(key);
-                    properties.put(key, val);
-                }
-
-               }
-
+            //the method below alters the properties object by overriding the
+            //hod properties with the values from properties and recomputing
+            //the properties
+            recomputeProperties(jobConf, properties);
+            
             hdfs = properties.getProperty(FILE_SYSTEM_LOCATION);
             if (hdfs == null) {
                 int errCode = 4007;
@@ -749,6 +733,44 @@
             return Integer.getInteger("hod.nodes", 15);
         }
     }
+    
+    /**
+     * Method to recompute pig properties by overriding hadoop properties
+     * with pig properties
+     * @param jobConf JobConf with appropriate hadoop resource files
+     * @param properties Pig properties that will override hadoop properties; properties might be modified
+     */
+    private void recomputeProperties(JobConf jobConf, Properties properties) {
+        // We need to load the properties from the hadoop configuration
+        // We want to override these with any existing properties we have.
+        if (jobConf != null && properties != null) {
+            Properties hadoopProperties = new Properties();
+            Iterator<Map.Entry<String, String>> iter = jobConf.iterator();
+            while (iter.hasNext()) {
+                Map.Entry<String, String> entry = iter.next();
+                hadoopProperties.put(entry.getKey(), entry.getValue());
+            }
+
+            //override hadoop properties with user defined properties
+            Enumeration<Object> propertiesIter = properties.keys();
+            while (propertiesIter.hasMoreElements()) {
+                String key = (String) propertiesIter.nextElement();
+                String val = properties.getProperty(key);
+                hadoopProperties.put(key, val);
+            }
+            
+            //clear user defined properties and re-populate
+            properties.clear();
+            Enumeration<Object> hadoopPropertiesIter = hadoopProperties.keys();
+            while (hadoopPropertiesIter.hasMoreElements()) {
+                String key = (String) hadoopPropertiesIter.nextElement();
+                String val = hadoopProperties.getProperty(key);
+                properties.put(key, val);
+            }
+
+        }
+    }
+    
 }
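
To illustrate the contract of the new recomputeProperties() helper, here is a hedged, standalone equivalent: every hadoop setting from the JobConf is copied in, user-defined Pig properties override the hadoop values, and the caller's Properties object is cleared and repopulated in place. The class name and property values are illustrative, not part of the patch.

    import java.util.Map;
    import java.util.Properties;
    import org.apache.hadoop.mapred.JobConf;

    public class RecomputePropertiesSketch {
        // Same merge order as recomputeProperties(): hadoop settings first,
        // then overlay the user-defined properties so they win.
        static void recompute(JobConf jobConf, Properties properties) {
            if (jobConf == null || properties == null) {
                return;
            }
            Properties merged = new Properties();
            for (Map.Entry<String, String> entry : jobConf) {
                merged.put(entry.getKey(), entry.getValue());
            }
            merged.putAll(properties);

            // Mirror the in-place behaviour: clear and repopulate the caller's object.
            properties.clear();
            properties.putAll(merged);
        }

        public static void main(String[] args) {
            JobConf jobConf = new JobConf();
            jobConf.set("mapred.job.tracker", "cluster-jt:50030"); // stand-in for a site-file value

            Properties pigProps = new Properties();
            pigProps.setProperty("mapred.job.tracker", "localhost:50030");

            recompute(jobConf, pigProps);
            // Prints localhost:50030 -- the user-defined value wins.
            System.out.println(pigProps.getProperty("mapred.job.tracker"));
        }
    }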
 
 

