[Pig Wiki] Update of MarkMeissonnier by MarkMeissonnier

2009-10-15 Thread Apache Wiki
Dear Wiki user,

You have subscribed to a wiki page or wiki category on Pig Wiki for change 
notification.

The MarkMeissonnier page has been changed by MarkMeissonnier:
http://wiki.apache.org/pig/MarkMeissonnier

New page:
#format wiki
#language en
== Mark Meissonnier ==

I am a software engineer who arrived in Silicon Valley in March 2000 (which, as 
a bit of trivia, was one month after the Nasdaq hit its all-time high of 5000 
points... What is it today?)




CategoryHomepage


svn commit: r825641 [3/3] - in /hadoop/pig/trunk: ./ contrib/zebra/ contrib/zebra/src/java/org/apache/hadoop/zebra/io/ contrib/zebra/src/java/org/apache/hadoop/zebra/mapred/ contrib/zebra/src/java/org

2009-10-15 Thread gates
Modified: 
hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/types/TestStorageCollection.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/types/TestStorageCollection.java?rev=825641&r1=825640&r2=825641&view=diff
==
--- 
hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/types/TestStorageCollection.java
 (original)
+++ 
hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/types/TestStorageCollection.java
 Thu Oct 15 20:38:08 2009
@@ -25,12 +25,12 @@
 import junit.framework.Assert;
 
 import org.apache.hadoop.zebra.types.CGSchema;
-import org.apache.hadoop.zebra.types.ColumnType;
-import org.apache.hadoop.zebra.types.ParseException;
+import org.apache.hadoop.zebra.schema.ColumnType;
+import org.apache.hadoop.zebra.parser.ParseException;
 import org.apache.hadoop.zebra.types.Partition;
-import org.apache.hadoop.zebra.types.Schema;
-import org.apache.hadoop.zebra.types.TableSchemaParser;
-import org.apache.hadoop.zebra.types.Schema.ColumnSchema;
+import org.apache.hadoop.zebra.schema.Schema;
+import org.apache.hadoop.zebra.parser.TableSchemaParser;
+import org.apache.hadoop.zebra.schema.Schema.ColumnSchema;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -64,11 +64,11 @@
   CGSchema cgs2 = cgschemas[1];
 
   ColumnSchema f11 = cgs1.getSchema().getColumn(0);
-  Assert.assertEquals(c1, f11.name);
-  Assert.assertEquals(ColumnType.COLLECTION, f11.type);
+  Assert.assertEquals(c1, f11.getName());
+  Assert.assertEquals(ColumnType.COLLECTION, f11.getType());
   ColumnSchema f21 = cgs2.getSchema().getColumn(0);
-  Assert.assertEquals(c2, f21.name);
-  Assert.assertEquals(ColumnType.COLLECTION, f21.type);
+  Assert.assertEquals(c2, f21.getName());
+  Assert.assertEquals(ColumnType.COLLECTION, f21.getType());
 
 System.out.println("*** Column Map **");
 Map<String, HashSet<Partition.PartitionInfo.ColumnMappingEntry>> colmap 
= p
@@ -125,14 +125,14 @@
   CGSchema cgs2 = cgschemas[1];
 
   ColumnSchema f11 = cgs1.getSchema().getColumn(0);
-  Assert.assertEquals(c1.f1, f11.name);
-  Assert.assertEquals(ColumnType.INT, f11.type);
+  Assert.assertEquals(c1.f1, f11.getName());
+  Assert.assertEquals(ColumnType.INT, f11.getType());
   ColumnSchema f21 = cgs2.getSchema().getColumn(0);
-  Assert.assertEquals(c1.f2, f21.name);
-  Assert.assertEquals(ColumnType.INT, f21.type);
+  Assert.assertEquals(c1.f2, f21.getName());
+  Assert.assertEquals(ColumnType.INT, f21.getType());
   ColumnSchema f22 = cgs2.getSchema().getColumn(1);
-  Assert.assertEquals(c2, f22.name);
-  Assert.assertEquals(ColumnType.COLLECTION, f22.type);
+  Assert.assertEquals(c2, f22.getName());
+  Assert.assertEquals(ColumnType.COLLECTION, f22.getType());
 
 System.out.println("*** Column Map **");
 Map<String, HashSet<Partition.PartitionInfo.ColumnMappingEntry>> colmap 
= p
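
(An aside for readers following this refactor: the test changes above swap the
org.apache.hadoop.zebra.types imports for the new schema and parser packages,
and replace direct access to ColumnSchema's public fields with accessor
methods. Below is a minimal compile-only sketch of the new access pattern,
using only names that appear in the diff; how the Schema instance is obtained,
e.g. via TableSchemaParser, is deliberately left out.)

    import org.apache.hadoop.zebra.schema.ColumnType;          // was ...zebra.types.ColumnType
    import org.apache.hadoop.zebra.schema.Schema;              // was ...zebra.types.Schema
    import org.apache.hadoop.zebra.schema.Schema.ColumnSchema; // was ...zebra.types.Schema.ColumnSchema

    public class ColumnSchemaAccessSketch {
        // Describes the first column of an already-built Schema.
        static String describeFirstColumn(Schema schema) {
            ColumnSchema cs = schema.getColumn(0);
            // Before r825641: direct field access, e.g. cs.name and cs.type.
            // After r825641: accessor methods.
            String name = cs.getName();
            ColumnType type = cs.getType();
            return name + " : " + type;
        }
    }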

Modified: 
hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/types/TestStorageMap.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/types/TestStorageMap.java?rev=825641&r1=825640&r2=825641&view=diff
==
--- 
hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/types/TestStorageMap.java
 (original)
+++ 
hadoop/pig/trunk/contrib/zebra/src/test/org/apache/hadoop/zebra/types/TestStorageMap.java
 Thu Oct 15 20:38:08 2009
@@ -26,12 +26,12 @@
 import junit.framework.Assert;
 
 import org.apache.hadoop.zebra.types.CGSchema;
-import org.apache.hadoop.zebra.types.ColumnType;
-import org.apache.hadoop.zebra.types.ParseException;
+import org.apache.hadoop.zebra.schema.ColumnType;
+import org.apache.hadoop.zebra.parser.ParseException;
 import org.apache.hadoop.zebra.types.Partition;
-import org.apache.hadoop.zebra.types.Schema;
-import org.apache.hadoop.zebra.types.TableSchemaParser;
-import org.apache.hadoop.zebra.types.Schema.ColumnSchema;
+import org.apache.hadoop.zebra.schema.Schema;
+import org.apache.hadoop.zebra.parser.TableSchemaParser;
+import org.apache.hadoop.zebra.schema.Schema.ColumnSchema;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -67,23 +67,23 @@
   CGSchema cgs3 = cgschemas[2];
 
   ColumnSchema f11 = cgs1.getSchema().getColumn(0);
-  Assert.assertEquals(f11.name, m1);
-  Assert.assertEquals(ColumnType.MAP, f11.type);
+  Assert.assertEquals(f11.getName(), m1);
+  Assert.assertEquals(ColumnType.MAP, f11.getType());
 
   ColumnSchema f21 = cgs2.getSchema().getColumn(0);
-  Assert.assertEquals(f21.name, m2);
+  Assert.assertEquals(f21.getName(), m2);
   // TODO: type should be MAP!
-  Assert.assertEquals(ColumnType.MAP, f21.type);
+  

svn commit: r825659 - in /hadoop/pig/trunk: ./ src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/ src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/ src

2009-10-15 Thread olga
Author: olga
Date: Thu Oct 15 21:04:45 2009
New Revision: 825659

URL: http://svn.apache.org/viewvc?rev=825659&view=rev
Log:
PIG-1018: FINDBUGS: NM_FIELD_NAMING_CONVENTION: Field names should start with
a lower case letter (olgan)
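
(For context, NM_FIELD_NAMING_CONVENTION is FindBugs' check that non-final
fields use lowerCamelCase. A hypothetical before/after illustration of the
warning and its fix follows; the class and field names are invented for
illustration and do not come from the Pig source.)

    import java.util.HashMap;
    import java.util.Map;

    public class NamingConventionExample {
        // Flagged by FindBugs: a non-final field starting with an upper-case letter,
        // e.g. "private Map<String, Integer> CountsByKey;".
        // Fixed form: lower-case first letter, camelCase thereafter.
        private Map<String, Integer> countsByKey = new HashMap<String, Integer>();

        public void increment(String key) {
            Integer current = countsByKey.get(key);
            countsByKey.put(key, current == null ? 1 : current + 1);
        }
    }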

Removed:

hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/util/operatorHelper.java
Modified:
hadoop/pig/trunk/CHANGES.txt

hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/LogToPhyTranslationVisitor.java

hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/relationalOperators/POForEach.java

hadoop/pig/trunk/src/org/apache/pig/backend/local/executionengine/physicalLayer/LocalLogToPhyTranslationVisitor.java
hadoop/pig/trunk/src/org/apache/pig/pen/DerivedDataVisitor.java
hadoop/pig/trunk/src/org/apache/pig/pen/EquivalenceClasses.java
hadoop/pig/trunk/src/org/apache/pig/pen/ExampleGenerator.java
hadoop/pig/trunk/src/org/apache/pig/pen/util/DisplayExamples.java
hadoop/pig/trunk/test/findbugsExcludeFile.xml

Modified: hadoop/pig/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/CHANGES.txt?rev=825659&r1=825658&r2=825659&view=diff
==
--- hadoop/pig/trunk/CHANGES.txt (original)
+++ hadoop/pig/trunk/CHANGES.txt Thu Oct 15 21:04:45 2009
@@ -26,6 +26,9 @@
 
 IMPROVEMENTS
 
+PIG-1018: FINDBUGS: NM_FIELD_NAMING_CONVENTION: Field names should start with
+a lower case letter (olgan)
+
 PIG-1023: FINDBUGS: exclude CN_IDIOM_NO_SUPER_CALL (olgan)
 
 PIG-1019: added findbugs exclusion file (olgan)

Modified: 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/LogToPhyTranslationVisitor.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/LogToPhyTranslationVisitor.java?rev=825659&r1=825658&r2=825659&view=diff
==
--- 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/LogToPhyTranslationVisitor.java
 (original)
+++ 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/physicalLayer/LogToPhyTranslationVisitor.java
 Thu Oct 15 21:04:45 2009
@@ -66,7 +66,7 @@
 
 public class LogToPhyTranslationVisitor extends LOVisitor {
 
-protected Map<LogicalOperator, PhysicalOperator> LogToPhyMap;
+protected Map<LogicalOperator, PhysicalOperator> logToPhyMap;
 
 Random r = new Random();
 
@@ -88,7 +88,7 @@
 
 currentPlans = new Stack<PhysicalPlan>();
 currentPlan = new PhysicalPlan();
-LogToPhyMap = new HashMap<LogicalOperator, PhysicalOperator>();
+logToPhyMap = new HashMap<LogicalOperator, PhysicalOperator>();
 }
 
 public void setPigContext(PigContext pc) {
@@ -107,19 +107,19 @@
 scope, nodeGen.getNextNodeId(scope)), op
 .getRequestedParallelism());
 exprOp.setOperandType(op.getLhsOperand().getType());
-exprOp.setLhs((ExpressionOperator) 
LogToPhyMap.get(op.getLhsOperand()));
-exprOp.setRhs((ExpressionOperator) 
LogToPhyMap.get(op.getRhsOperand()));
+exprOp.setLhs((ExpressionOperator) 
logToPhyMap.get(op.getLhsOperand()));
+exprOp.setRhs((ExpressionOperator) 
logToPhyMap.get(op.getRhsOperand()));
 LogicalPlan lp = op.getPlan();
 
 currentPlan.add(exprOp);
-LogToPhyMap.put(op, exprOp);
+logToPhyMap.put(op, exprOp);
 
 List<LogicalOperator> predecessors = lp.getPredecessors(op);
 
 if (predecessors == null)
 return;
 for (LogicalOperator lo : predecessors) {
-PhysicalOperator from = LogToPhyMap.get(lo);
+PhysicalOperator from = logToPhyMap.get(lo);
 try {
 // currentExprPlan.connect(from, exprOp);
 currentPlan.connect(from, exprOp);
@@ -138,19 +138,19 @@
 scope, nodeGen.getNextNodeId(scope)), op
 .getRequestedParallelism());
 exprOp.setOperandType(op.getLhsOperand().getType());
-exprOp.setLhs((ExpressionOperator) 
LogToPhyMap.get(op.getLhsOperand()));
-exprOp.setRhs((ExpressionOperator) 
LogToPhyMap.get(op.getRhsOperand()));
+exprOp.setLhs((ExpressionOperator) 
logToPhyMap.get(op.getLhsOperand()));
+exprOp.setRhs((ExpressionOperator) 
logToPhyMap.get(op.getRhsOperand()));
 LogicalPlan lp = op.getPlan();
 
 currentPlan.add(exprOp);
-LogToPhyMap.put(op, exprOp);
+logToPhyMap.put(op, exprOp);
 
 List<LogicalOperator> predecessors = lp.getPredecessors(op);
 
 if (predecessors == null)
 return;
 for (LogicalOperator lo : predecessors) {
-PhysicalOperator from = LogToPhyMap.get(lo);
+PhysicalOperator from = logToPhyMap.get(lo);
 try {
 

[Pig Wiki] Update of PigErrorHandlingFunctionalSpecification by daijy

2009-10-15 Thread Apache Wiki
Dear Wiki user,

You have subscribed to a wiki page or wiki category on Pig Wiki for change 
notification.

The PigErrorHandlingFunctionalSpecification page has been changed by daijy:
http://wiki.apache.org/pig/PigErrorHandlingFunctionalSpecification?action=diff&rev1=127&rev2=128

  ||1105||Heap percentage / Conversion factor cannot be set to 0 ||
  ||1106||Merge join is possible only for simple column or '*' join keys when 
using funcspec as the loader ||
  ||1107||Try to merge incompatible types (e.g. numerical type vs non-numerical 
type)||
+ ||1108||Duplicated schema||
- ||2000||Internal error. Mismatch in group by arities. Expected: schema. 
Found: schema||
+ ||20008||Internal error. Mismatch in group by arities. Expected: schema. 
Found: schema||
  ||2001||Unable to clone plan before compiling||
  ||2002||The output file(s): filename  already exists||
  ||2003||Cannot read from the storage where the output filename will be 
stored||


svn commit: r825712 - in /hadoop/pig/trunk: CHANGES.txt src/org/apache/pig/Main.java src/org/apache/pig/impl/io/FileLocalizer.java src/org/apache/pig/impl/util/PropertiesUtil.java src/org/apache/pig/t

2009-10-15 Thread olga
Author: olga
Date: Fri Oct 16 00:16:57 2009
New Revision: 825712

URL: http://svn.apache.org/viewvc?rev=825712&view=rev
Log:
PIG-1009: FINDBUGS: OS_OPEN_STREAM: Method may fail to close stream (olgan)
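
(The fix applied across the files below follows one pattern: declare the
stream before the try block and close it in a finally clause so it cannot leak
when an exception is thrown. A standalone sketch of that pattern follows; the
method name and the file-path parameter are placeholders, not code from the
Pig source.)

    import java.io.FileReader;
    import java.io.IOException;
    import java.util.Properties;

    public class CloseStreamSketch {
        // Loads properties from the given path, always closing the reader.
        static Properties loadOrEmpty(String path) {
            Properties props = new Properties();
            FileReader reader = null;
            try {
                reader = new FileReader(path);
                props.load(reader);
            } catch (IOException e) {
                System.err.println("Warn: cannot open properties file, using defaults");
            } finally {
                if (reader != null) {
                    try { reader.close(); } catch (IOException e) { /* ignore */ }
                }
            }
            return props;
        }
    }

(On Java 7 and later, try-with-resources gives the same guarantee more
concisely, but the finally-block form matches the code base at the time.)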

Modified:
hadoop/pig/trunk/CHANGES.txt
hadoop/pig/trunk/src/org/apache/pig/Main.java
hadoop/pig/trunk/src/org/apache/pig/impl/io/FileLocalizer.java
hadoop/pig/trunk/src/org/apache/pig/impl/util/PropertiesUtil.java

hadoop/pig/trunk/src/org/apache/pig/tools/parameters/PreprocessorContext.java

Modified: hadoop/pig/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/CHANGES.txt?rev=825712&r1=825711&r2=825712&view=diff
==
--- hadoop/pig/trunk/CHANGES.txt (original)
+++ hadoop/pig/trunk/CHANGES.txt Fri Oct 16 00:16:57 2009
@@ -26,6 +26,8 @@
 
 IMPROVEMENTS
 
+PIG-1009: FINDBUGS: OS_OPEN_STREAM: Method may fail to close stream (olgan)
+
 PIG-1008: FINDBUGS: NP_TOSTRING_COULD_RETURN_NULL (olgan)
 
 PIG-1018: FINDBUGS: NM_FIELD_NAMING_CONVENTION: Field names should start with

Modified: hadoop/pig/trunk/src/org/apache/pig/Main.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/Main.java?rev=825712&r1=825711&r2=825712&view=diff
==
--- hadoop/pig/trunk/src/org/apache/pig/Main.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/Main.java Fri Oct 16 00:16:57 2009
@@ -463,15 +463,20 @@
 }
 
 Properties props = new Properties();
+FileReader propertyReader = null;
 if (log4jconf != null) {
 try {
-FileReader propertyReader = new FileReader(log4jconf);
+propertyReader = new FileReader(log4jconf);
 props.load(propertyReader);
 }
 catch (IOException e)
 {
 System.err.println("Warn: Cannot open log4j properties file, use default");
 }
+finally
+{
+if (propertyReader != null) try {propertyReader.close();} 
catch(Exception e) {}
+}
 }
 if (props.size() == 0) {
 props.setProperty("log4j.rootLogger", "INFO, PIGCONSOLE");

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/io/FileLocalizer.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/io/FileLocalizer.java?rev=825712&r1=825711&r2=825712&view=diff
==
--- hadoop/pig/trunk/src/org/apache/pig/impl/io/FileLocalizer.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/io/FileLocalizer.java Fri Oct 16 
00:16:57 2009
@@ -664,12 +664,16 @@
 if (exitVal != 0)
 return null;
 String line = null;
+BufferedReader br = null;
 try {
 InputStreamReader isr = new InputStreamReader(p.getInputStream());
-BufferedReader br = new BufferedReader(isr);
+br = new BufferedReader(isr);
 line = br.readLine();
+isr.close();
 } catch (IOException e) {
 return null;
+} finally {
+if (br != null) try {br.close();} catch (Exception e) {}
 }
 return line;
 }

Modified: hadoop/pig/trunk/src/org/apache/pig/impl/util/PropertiesUtil.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/impl/util/PropertiesUtil.java?rev=825712&r1=825711&r2=825712&view=diff
==
--- hadoop/pig/trunk/src/org/apache/pig/impl/util/PropertiesUtil.java (original)
+++ hadoop/pig/trunk/src/org/apache/pig/impl/util/PropertiesUtil.java Fri Oct 
16 00:16:57 2009
@@ -35,9 +35,11 @@
 private final static Log log = LogFactory.getLog(PropertiesUtil.class);
 
 public static void loadPropertiesFromFile(Properties properties) {
+InputStream inputStream = null;
+BufferedInputStream bis = null;
 try {
 Class<PropertiesUtil> clazz = PropertiesUtil.class;
-InputStream inputStream = clazz
+inputStream = clazz
 .getResourceAsStream(PROPERTIES_FILE);
 if (inputStream == null) {
 String msg = "no pig.properties configuration file available in the classpath";
@@ -47,6 +49,8 @@
 }
 } catch (Exception e) {
 log.error("unable to parse pig.properties :", e);
+} finally {
+if (inputStream != null) try {inputStream.close();} catch 
(Exception e) {}
 }
 
 Properties pigrcProps = new Properties() ;
@@ -55,10 +59,14 @@
 if (pigrcFile.exists()) {
 log.warn(pigrcFile.getAbsolutePath()
 + " exists but will be deprecated soon. Use conf/pig.properties instead!");
-pigrcProps.load(new BufferedInputStream(new 
FileInputStream(pigrcFile))) ;
+
+bis = new BufferedInputStream(new