svn commit: r934487 - in /hadoop/pig/branches/branch-0.6: CHANGES.txt build.xml

2010-04-15 Thread pradeepkth
Author: pradeepkth
Date: Thu Apr 15 17:02:46 2010
New Revision: 934487

URL: http://svn.apache.org/viewvc?rev=934487&view=rev
Log:
PIG-1325: Provide a way to exclude a testcase when running ant test 
(pradeepkth)

Modified:
hadoop/pig/branches/branch-0.6/CHANGES.txt
hadoop/pig/branches/branch-0.6/build.xml

Modified: hadoop/pig/branches/branch-0.6/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.6/CHANGES.txt?rev=934487&r1=934486&r2=934487&view=diff
==============================================================================
--- hadoop/pig/branches/branch-0.6/CHANGES.txt (original)
+++ hadoop/pig/branches/branch-0.6/CHANGES.txt Thu Apr 15 17:02:46 2010
@@ -26,6 +26,9 @@ PIG-922: Logical optimizer: push up proj
 
 IMPROVEMENTS
 
+PIG-1325: Provide a way to exclude a testcase when running ant test
+(pradeepkth)
+
 PIG-1376: Pig 060 Docs - ILLUSTRATE and Passing Configurations to UDFs 
(chandec via olgan)
 
 PIG-1214: Pig 0.6 Docs fixes (chandec via olgan)

Modified: hadoop/pig/branches/branch-0.6/build.xml
URL: 
http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.6/build.xml?rev=934487&r1=934486&r2=934487&view=diff
==============================================================================
--- hadoop/pig/branches/branch-0.6/build.xml (original)
+++ hadoop/pig/branches/branch-0.6/build.xml Thu Apr 15 17:02:46 2010
@@ -539,6 +539,7 @@
 <exclude name="**/TestOrderBy2.java" />
 <exclude name="**/TestPi.java" />
 <exclude name="**/nightly/**" />
+<exclude name="**/${exclude.testcase}.java" if="exclude.testcase" />
 </fileset>
 </batchtest>
 <batchtest fork="yes" todir="${test.log.dir}" if="testcase">
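
With this change a single test class can be skipped by setting the new
property on the ant command line, for example (TestFoo is a hypothetical
test class name; any test class under the fileset matches):

    ant test -Dexclude.testcase=TestFoo

Because the added exclude carries if="exclude.testcase", it is ignored when
the property is unset, so a plain "ant test" run behaves exactly as before.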




svn commit: r934488 - in /hadoop/pig/trunk: CHANGES.txt src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java src/org/apache/pig/backend/hadoop/executionengine/mapReduce

2010-04-15 Thread pradeepkth
Author: pradeepkth
Date: Thu Apr 15 17:04:25 2010
New Revision: 934488

URL: http://svn.apache.org/viewvc?rev=934488&view=rev
Log:
PIG-1372: Restore PigInputFormat.sJob for backward compatibility (pradeepkth)

Modified:
hadoop/pig/trunk/CHANGES.txt

hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java

hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java

Modified: hadoop/pig/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/CHANGES.txt?rev=934488&r1=934487&r2=934488&view=diff
==============================================================================
--- hadoop/pig/trunk/CHANGES.txt (original)
+++ hadoop/pig/trunk/CHANGES.txt Thu Apr 15 17:04:25 2010
@@ -43,6 +43,8 @@ PIG-1309: Map-side Cogroup (ashutoshc)
 
 BUG FIXES
 
+PIG-1372: Restore PigInputFormat.sJob for backward compatibility (pradeepkth)
+
 PIG-1369: POProject does not handle null tuples and non existent fields in
 some cases (pradeepkth)
 

Modified: 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java?rev=934488&r1=934487&r2=934488&view=diff
==============================================================================
--- 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java
 (original)
+++ 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java
 Thu Apr 15 17:04:25 2010
@@ -47,6 +47,7 @@ import org.apache.pig.impl.io.FileSpec;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.util.ObjectSerializer;
 import org.apache.pig.impl.util.Pair;
+import org.apache.pig.impl.util.UDFContext;
 
 public class PigInputFormat extends InputFormat<Text, Tuple> {
 
@@ -62,6 +63,14 @@ public class PigInputFormat extends Inpu
 
 public static final String PIG_INPUTS = "pig.inputs";
 
+/**
+ * @deprecated Use {@link UDFContext} instead in the following way to get
+ * the job's {@link Configuration}:
+ * <pre>UdfContext.getUdfContext().getJobConf()</pre>
+ */
+@Deprecated
+public static Configuration sJob;
+
 /* (non-Javadoc)
  * @see 
org.apache.hadoop.mapreduce.InputFormat#createRecordReader(org.apache.hadoop.mapreduce.InputSplit,
 org.apache.hadoop.mapreduce.TaskAttemptContext)
  */
@@ -93,6 +102,10 @@ public class PigInputFormat extends Inpu
 
 // merge entries from split specific conf into the conf we got
 PigInputFormat.mergeSplitSpecificConf(loadFunc, pigSplit, conf);
+
+// for backward compatibility
+PigInputFormat.sJob = conf;
+
 InputFormat inputFormat = loadFunc.getInputFormat();
 // now invoke the createRecordReader() with this adjusted conf
 RecordReader reader = inputFormat.createRecordReader(

Modified: 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java?rev=934488&r1=934487&r2=934488&view=diff
==============================================================================
--- 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
 (original)
+++ 
hadoop/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
 Thu Apr 15 17:04:25 2010
@@ -54,6 +54,7 @@ import org.apache.pig.impl.plan.Dependen
 import org.apache.pig.impl.plan.VisitorException;
 import org.apache.pig.impl.util.ObjectSerializer;
 import org.apache.pig.impl.util.SpillableMemoryManager;
+import org.apache.pig.impl.util.UDFContext;
 import org.apache.pig.tools.pigstats.PigStatusReporter;
 
 /**
@@ -82,6 +83,15 @@ import org.apache.pig.tools.pigstats.Pig
 public class PigMapReduce {
 
 public static JobContext sJobContext = null;
+
+/**
+ * @deprecated Use {@link UDFContext} instead in the following way to get
+ * the job's {@link Configuration}:
+ * <pre>UdfContext.getUdfContext().getJobConf()</pre>
+ */
+// This is used by internal pig code - it is deprecated for user code but is
+// used by Pig internal code to set up UDFContext's conf among other things.
+@Deprecated
 public static Configuration sJobConf = null;
 private final static Tuple DUMMYTUPLE = null;
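
For UDF authors, the migration called out in the deprecation notices amounts
to the sketch below. Only the UDFContext call is taken from the javadoc above
(the class is org.apache.pig.impl.util.UDFContext, imported by this commit);
the UDF class and the property name are hypothetical:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.pig.EvalFunc;
    import org.apache.pig.data.Tuple;
    import org.apache.pig.impl.util.UDFContext;

    public class MyUdf extends EvalFunc<String> {
        @Override
        public String exec(Tuple input) throws IOException {
            // Deprecated style: Configuration conf = PigInputFormat.sJob;
            // Replacement per the deprecation javadoc:
            Configuration conf = UDFContext.getUDFContext().getJobConf();
            // "my.job.property" is a made-up key, purely for illustration.
            return conf == null ? null : conf.get("my.job.property");
        }
    }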
 




svn commit: r934492 - in /hadoop/pig/branches/branch-0.7: CHANGES.txt src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java src/org/apache/pig/backend/hadoop/executionen

2010-04-15 Thread pradeepkth
Author: pradeepkth
Date: Thu Apr 15 17:22:49 2010
New Revision: 934492

URL: http://svn.apache.org/viewvc?rev=934492&view=rev
Log:
PIG-1372: Restore PigInputFormat.sJob for backward compatibility (pradeepkth)

Modified:
hadoop/pig/branches/branch-0.7/CHANGES.txt

hadoop/pig/branches/branch-0.7/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java

hadoop/pig/branches/branch-0.7/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java

Modified: hadoop/pig/branches/branch-0.7/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.7/CHANGES.txt?rev=934492&r1=934491&r2=934492&view=diff
==============================================================================
--- hadoop/pig/branches/branch-0.7/CHANGES.txt (original)
+++ hadoop/pig/branches/branch-0.7/CHANGES.txt Thu Apr 15 17:22:49 2010
@@ -183,6 +183,8 @@ OPTIMIZATIONS
 
 BUG FIXES
 
+PIG-1372: Restore PigInputFormat.sJob for backward compatibility (pradeepkth)
+
 PIG-1369: POProject does not handle null tuples and non existent fields in
 some cases (pradeepkth)
 

Modified: 
hadoop/pig/branches/branch-0.7/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.7/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java?rev=934492&r1=934491&r2=934492&view=diff
==============================================================================
--- 
hadoop/pig/branches/branch-0.7/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java
 (original)
+++ 
hadoop/pig/branches/branch-0.7/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigInputFormat.java
 Thu Apr 15 17:22:49 2010
@@ -47,6 +47,7 @@ import org.apache.pig.impl.io.FileSpec;
 import org.apache.pig.impl.plan.OperatorKey;
 import org.apache.pig.impl.util.ObjectSerializer;
 import org.apache.pig.impl.util.Pair;
+import org.apache.pig.impl.util.UDFContext;
 
 public class PigInputFormat extends InputFormat<Text, Tuple> {
 
@@ -62,6 +63,14 @@ public class PigInputFormat extends Inpu
 
 public static final String PIG_INPUTS = "pig.inputs";
 
+/**
+ * @deprecated Use {@link UDFContext} instead in the following way to get
+ * the job's {@link Configuration}:
+ * <pre>UdfContext.getUdfContext().getJobConf()</pre>
+ */
+@Deprecated
+public static Configuration sJob;
+
 /* (non-Javadoc)
  * @see 
org.apache.hadoop.mapreduce.InputFormat#createRecordReader(org.apache.hadoop.mapreduce.InputSplit,
 org.apache.hadoop.mapreduce.TaskAttemptContext)
  */
@@ -93,6 +102,10 @@ public class PigInputFormat extends Inpu
 
 // merge entries from split specific conf into the conf we got
 PigInputFormat.mergeSplitSpecificConf(loadFunc, pigSplit, conf);
+
+// for backward compatibility
+PigInputFormat.sJob = conf;
+
 InputFormat inputFormat = loadFunc.getInputFormat();
 // now invoke the createRecordReader() with this adjusted conf
 RecordReader reader = inputFormat.createRecordReader(

Modified: 
hadoop/pig/branches/branch-0.7/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/branches/branch-0.7/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java?rev=934492&r1=934491&r2=934492&view=diff
==============================================================================
--- 
hadoop/pig/branches/branch-0.7/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
 (original)
+++ 
hadoop/pig/branches/branch-0.7/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/PigMapReduce.java
 Thu Apr 15 17:22:49 2010
@@ -54,6 +54,7 @@ import org.apache.pig.impl.plan.Dependen
 import org.apache.pig.impl.plan.VisitorException;
 import org.apache.pig.impl.util.ObjectSerializer;
 import org.apache.pig.impl.util.SpillableMemoryManager;
+import org.apache.pig.impl.util.UDFContext;
 
 /**
  * This class is the static Mapper &amp; Reducer classes that
@@ -81,6 +82,15 @@ import org.apache.pig.impl.util.Spillabl
 public class PigMapReduce {
 
 public static JobContext sJobContext = null;
+
+/**
+ * @deprecated Use {@link UDFContext} instead in the following way to get
+ * the job's {@link Configuration}:
+ * <pre>UdfContext.getUdfContext().getJobConf()</pre>
+ */
+// This is used by internal pig code - it is deprecated for user code but is
+// used by Pig internal code to set up UDFContext's conf among other things.
+@Deprecated
 public static Configuration sJobConf = null;
 private final static Tuple DUMMYTUPLE = null;
 




svn commit: r934649 [17/24] - in /hadoop/pig/trunk/contrib: ./ owl/ owl/bin/ owl/ci/ owl/ci/test_results/ owl/ci/test_scripts/ owl/docs/ owl/ivy/ owl/java/ owl/java/lib/ owl/java/main/ owl/java/main/M

2010-04-15 Thread gates
Added: 
hadoop/pig/trunk/contrib/owl/java/test/org/apache/hadoop/owl/client/MultiplePartitionIntervalTest.java
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/owl/java/test/org/apache/hadoop/owl/client/MultiplePartitionIntervalTest.java?rev=934649&view=auto
==============================================================================
--- 
hadoop/pig/trunk/contrib/owl/java/test/org/apache/hadoop/owl/client/MultiplePartitionIntervalTest.java
 (added)
+++ 
hadoop/pig/trunk/contrib/owl/java/test/org/apache/hadoop/owl/client/MultiplePartitionIntervalTest.java
 Thu Apr 15 23:56:44 2010
@@ -0,0 +1,750 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.owl.client;
+
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.List;
+import java.util.Date;
+import org.apache.hadoop.owl.OwlTestCase;
+import org.apache.hadoop.owl.common.OwlException;
+import org.apache.hadoop.owl.common.OwlUtil;
+import org.apache.hadoop.owl.client.CompleteSanityTest;
+import org.apache.hadoop.owl.protocol.OwlDataElement;
+import org.apache.hadoop.owl.protocol.OwlKeyValue;
+import org.apache.hadoop.owl.protocol.OwlObject;
+import org.apache.hadoop.owl.protocol.OwlPartitionKey;
+import org.apache.hadoop.owl.protocol.OwlPartitionProperty;
+import org.apache.hadoop.owl.protocol.OwlPropertyKey;
+import org.apache.hadoop.owl.protocol.OwlResultObject;
+import org.apache.hadoop.owl.protocol.OwlTable;
+import org.junit.Before;
+import org.junit.Test;
+
+public class MultiplePartitionIntervalTest extends OwlTestCase {
+// private int counter = 0;
+private static OwlClient client;
+private CompleteSanityTest csit;
+private MultiplePartitionTest mpit;
+
+public MultiplePartitionIntervalTest() {
+client = new OwlClient(getUri());
+this.csit = new CompleteSanityTest();
+this.mpit = new MultiplePartitionTest();
+}
+
+@Before
+public void testInitialize() {
+}
+
+public void createMultiplePartitionedIntervalOwlTable(String owlTableName,
+String databaseName, String propertyKeyName1, String type1,
+String propertyKeyName2, String type2, String ptnType,
+String partitionKey1, String partitionKey1Type1,
+String partitionKey2, String partitionKey2Type2,
+String part1PropertyKey1, String part1PropertyKey1Type1,
+String part1PropertyKey2, String part1PropertyKey2Type2,
+String part2PropertyKey1, String part2PropertyKey1Type1,
+String part2PropertyKey2, String part2PropertyKey2Type2)
+throws OwlException {
+
+System.out.println("Owl Table name " + owlTableName
++ " within owldatabase " + databaseName);
+
+String testCmd = "create owltable type basic " + owlTableName
++ " within owldatabase " + databaseName + " define property key "
++ propertyKeyName1 + " : " + type1 + " , " + propertyKeyName2
++ " : " + type2 + " partitioned by " + ptnType
++ " with partition key " + partitionKey1
++ " define property key " + part1PropertyKey1 + " : "
++ part1PropertyKey1Type1 + " , " + part1PropertyKey2 + " : "
++ part1PropertyKey2Type2
++ " partitioned by LIST with partition key " + partitionKey2
++ " : " + partitionKey2Type2 + " define property key "
++ part2PropertyKey1 + " : " + part2PropertyKey1Type1 + " ,"
++ part2PropertyKey2 + " : " + part2PropertyKey2Type2
++ " schema \"f1:int\"";
+
+System.out.println(testCmd);
+client.execute(testCmd);
+mpit.verifyCreateMultiplePartitionedOwlTable(owlTableName, 
databaseName,
+propertyKeyName1, type1, propertyKeyName2, type2,
+partitionKey1, partitionKey1Type1, partitionKey2,
+partitionKey2Type2, part1PropertyKey1, part1PropertyKey1Type1,
+part1PropertyKey2, part1PropertyKey2Type2, part2PropertyKey1,
+part2PropertyKey1Type1, part2PropertyKey2,
+part2PropertyKey2Type2);
+}
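
To make the command template above concrete, here is roughly what testCmd
expands to for purely hypothetical arguments (owlTableName "mpitest",
databaseName "testdb", property keys retention:STRING and color:INT, ptnType
"INTERVAL", partition keys "ds" and "region", and made-up per-partition
property keys), shown wrapped for readability:

    create owltable type basic mpitest within owldatabase testdb
    define property key retention : STRING , color : INT
    partitioned by INTERVAL with partition key ds
    define property key size : STRING , count : INT
    partitioned by LIST with partition key region : STRING
    define property key continent : STRING , country : STRING
    schema "f1:int"

Every name here is illustrative; the exact string is whatever the
concatenation in createMultiplePartitionedIntervalOwlTable produces.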
+
+public void publishDataElementToMultiplePartitionedIntervalOwlTable(
+String owlTableName, String databaseName, String propertyKeyName1,
+int 

svn commit: r934649 [24/24] - in /hadoop/pig/trunk/contrib: ./ owl/ owl/bin/ owl/ci/ owl/ci/test_results/ owl/ci/test_scripts/ owl/docs/ owl/ivy/ owl/java/ owl/java/lib/ owl/java/main/ owl/java/main/M

2010-04-15 Thread gates
Added: hadoop/pig/trunk/contrib/owl/setup/oracle/orm.xml
URL: 
http://svn.apache.org/viewvc/hadoop/pig/trunk/contrib/owl/setup/oracle/orm.xml?rev=934649&view=auto
==============================================================================
--- hadoop/pig/trunk/contrib/owl/setup/oracle/orm.xml (added)
+++ hadoop/pig/trunk/contrib/owl/setup/oracle/orm.xml Thu Apr 15 23:56:44 2010
@@ -0,0 +1,464 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+ <!--  This orm is for oracle -->
+
+<entity-mappings xmlns="http://java.sun.com/xml/ns/persistence/orm"
+xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+xsi:schemaLocation="http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_2_0.xsd"
+version="2.0">
+<description>JPA Mapping file for Owl with JPA</description>
+<package>org.apache.hadoop.owl.orm</package>
+
+<entity class="org.apache.hadoop.owl.entity.DatabaseEntity" name="DatabaseEntity">
+<table name="owl_database"/>
+<attributes>
+<id name="id">
+<column name="odb_id"/>
+<generated-value strategy="AUTO"/>
+</id>
+<basic name="name">
+<column name="odb_name" length="255"/>
+</basic>
+<basic name="description">
+<column name="odb_description" length="255"/>
+</basic>
+<basic name="owner">
+<column name="odb_owner" length="255"/>
+</basic>
+<basic name="location">
+<column name="odb_location" length="750"/>
+</basic>
+<basic name="createdAt">
+<column name="odb_createdat"/>
+</basic>
+<basic name="lastModifiedAt">
+<column name="odb_lastmodified"/>
+</basic>
+<basic name="version">
+<column name="odb_version"/>
+</basic>
+</attributes>
+</entity>
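
Since this mapping is XML-only, the entity class it binds is not part of this
file. Under standard JPA conventions it would look roughly like the sketch
below; the field names come from the name attributes above, while the types
and accessors are assumptions:

    package org.apache.hadoop.owl.entity;

    // Hypothetical sketch of the mapped class; see orm.xml above for the
    // real column names (odb_*) and lengths.
    public class DatabaseEntity {
        private int id;              // odb_id, generated with strategy AUTO
        private String name;         // odb_name, length 255
        private String description;  // odb_description, length 255
        private String owner;        // odb_owner, length 255
        private String location;     // odb_location, length 750
        private long createdAt;      // odb_createdat
        private long lastModifiedAt; // odb_lastmodified
        private int version;         // odb_version

        public int getId() { return id; }
        public void setId(int id) { this.id = id; }
        // ...remaining getters and setters follow the same bean pattern.
    }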
+
+<entity class="org.apache.hadoop.owl.entity.OwlTableEntity" name="OwlTableEntity">
+<table name="owl_table"/>
+<attributes>
+<id name="id">
+<column name="ot_id"/>
+<generated-value strategy="AUTO"/>
+</id>
+<basic name="databaseId">
+<column name="ot_database_id"/>
+</basic>
+<basic name="name">
+<column name="ot_name" length="255"/>
+</basic>
+<basic name="description">
+<column name="ot_description" length="255"/>
+</basic>
+<basic name="location">
+<column name="ot_location" length="750"/>
+</basic>
+<basic name="owner">
+<column name="ot_owner" length="255"/>
+</basic>
+<basic name="createdAt">
+<column name="ot_createdat"/>
+</basic>
+<basic name="lastModifiedAt">
+<column name="ot_lastmodified"/>
+</basic>
+<basic name="version">
+<column name="ot_version"/>
+</basic>
+<basic name="schemaId">
+<column name="ot_schemaid"/>
+</basic>
+<basic name="loader">
+<column name="ot_loader"/>
+</basic>
+<one-to-many name="partitionKeys" target-entity="org.apache.hadoop.owl.entity.PartitionKeyEntity" mapped-by="owlTable">
+<join-column name="pak_owltable_id"/>
+<cascade>
+<cascade-all/>
+</cascade>
+</one-to-many>
+
+<one-to-many name="propertyKeys" target-entity="org.apache.hadoop.owl.entity.PropertyKeyEntity" mapped-by="owlTable">
+<join-column name="prk_owltable_id"/>
+<cascade>
+<cascade-all/>
+</cascade>
+</one-to-many>
+
+<one-to-many name="keyValues" target-entity="org.apache.hadoop.owl.entity.OwlTableKeyValueEntity" mapped-by="owlTable">
+<join-column name="otkv_owltable_id"/>
+<cascade>
+<cascade-all/>
+</cascade>
+</one-to-many>
+
+</attributes>
+</entity>
+
+<entity class="org.apache.hadoop.owl.entity.PartitionEntity" name="PartitionEntity">
+<table name="owl_partition"/>
+<attributes>
+<id name="id">