Apache-Phoenix | Master | Hadoop1 | Build Successful

2014-09-21 Apache Jenkins Server
Master branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-master-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-master-hadoop1/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-1268 Misleading exception when attempting to drop system table



Apache-Phoenix | 4.0 | Hadoop1 | Build Successful

2014-09-21 Apache Jenkins Server
4.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.0-hadoop1/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.0-hadoop1/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-1268 Misleading exception when attempting to drop system table



Apache-Phoenix | 3.0 | Hadoop1 | Build Successful

2014-09-21 Apache Jenkins Server
3.0 branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-1268 Misleading exception when attempting to drop system table



git commit: PHOENIX-1268 Misleading exception when attempting to drop system table

2014-09-21 jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master 598bc323f -> d88f3571f


PHOENIX-1268 Misleading exception when attempting to drop system table


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d88f3571
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d88f3571
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d88f3571

Branch: refs/heads/master
Commit: d88f3571fd4f7b5e0fbb30c8c944179ea006709e
Parents: 598bc32
Author: James Taylor 
Authored: Sun Sep 21 13:44:12 2014 -0700
Committer: James Taylor 
Committed: Sun Sep 21 13:47:19 2014 -0700

--
 .../org/apache/phoenix/end2end/AlterTableIT.java  | 18 ++
 phoenix-core/src/main/antlr3/PhoenixSQL.g |  2 +-
 2 files changed, 19 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d88f3571/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 9e6911c..7297afc 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -108,6 +108,24 @@ public class AlterTableIT extends BaseHBaseManagedTimeIT {
     }
 
     @Test
+    public void testDropSystemTable() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+
+        try {
+            try {
+                conn.createStatement().executeUpdate(
+                        "DROP TABLE " + PhoenixDatabaseMetaData.SYSTEM_CATALOG);
+                fail("Should not be allowed to drop a system table");
+            } catch (SQLException e) {
+                assertEquals(SQLExceptionCode.CANNOT_MUTATE_TABLE.getErrorCode(), e.getErrorCode());
+            }
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
     public void testAddVarCharColToPK() throws Exception {
         Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
         Connection conn = DriverManager.getConnection(getUrl(), props);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/d88f3571/phoenix-core/src/main/antlr3/PhoenixSQL.g
--
diff --git a/phoenix-core/src/main/antlr3/PhoenixSQL.g b/phoenix-core/src/main/antlr3/PhoenixSQL.g
index 42b8a98..6264648 100644
--- a/phoenix-core/src/main/antlr3/PhoenixSQL.g
+++ b/phoenix-core/src/main/antlr3/PhoenixSQL.g
@@ -471,7 +471,7 @@ column_names returns [List<String> ret]
 // Parse a drop table statement.
 drop_table_node returns [DropTableStatement ret]
     :   DROP (v=VIEW | TABLE) (IF ex=EXISTS)? t=from_table_name (c=CASCADE)?
-        {ret = factory.dropTable(t, v==null ? PTableType.TABLE : PTableType.VIEW, ex!=null, c!=null); }
+        {ret = factory.dropTable(t, v==null ? (QueryConstants.SYSTEM_SCHEMA_NAME.equals(t.getSchemaName()) ? PTableType.SYSTEM : PTableType.TABLE) : PTableType.VIEW, ex!=null, c!=null); }
     ;
 
 // Parse a drop index statement

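The ANTLR action above is easier to read restated in plain Java. The following is a hedged sketch of the PHOENIX-1268 logic, not code from the patch: the helper name resolveDropType is hypothetical, while PTableType, QueryConstants.SYSTEM_SCHEMA_NAME, and TableName.getSchemaName() are the Phoenix identifiers the grammar action itself references.

import org.apache.phoenix.parse.TableName;
import org.apache.phoenix.query.QueryConstants;
import org.apache.phoenix.schema.PTableType;

// Hypothetical restatement of the embedded action in drop_table_node.
final class DropTypeSketch {
    static PTableType resolveDropType(boolean isView, TableName table) {
        if (isView) {
            return PTableType.VIEW;
        }
        // PHOENIX-1268: a table in the SYSTEM schema now parses as a SYSTEM
        // table, so the drop is rejected downstream with CANNOT_MUTATE_TABLE
        // (per the new test) instead of a misleading exception.
        return QueryConstants.SYSTEM_SCHEMA_NAME.equals(table.getSchemaName())
                ? PTableType.SYSTEM
                : PTableType.TABLE;
    }
}

The testDropSystemTable() addition above pins the user-visible effect: DROP TABLE against PhoenixDatabaseMetaData.SYSTEM_CATALOG now fails with SQLExceptionCode.CANNOT_MUTATE_TABLE.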


git commit: PHOENIX-1268 Misleading exception when attempting to drop system table

2014-09-21 jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.0 2ccb62d18 -> 757738397


PHOENIX-1268 Misleading exception when attempting to drop system table


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/75773839
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/75773839
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/75773839

Branch: refs/heads/4.0
Commit: 75773839710d778badd1fa385c460598cdc165ce
Parents: 2ccb62d
Author: James Taylor 
Authored: Sun Sep 21 13:44:12 2014 -0700
Committer: James Taylor 
Committed: Sun Sep 21 13:45:42 2014 -0700

--
 .../org/apache/phoenix/end2end/AlterTableIT.java  | 18 ++
 phoenix-core/src/main/antlr3/PhoenixSQL.g |  2 +-
 2 files changed, 19 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/75773839/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index 9e6911c..7297afc 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -108,6 +108,24 @@ public class AlterTableIT extends BaseHBaseManagedTimeIT {
     }
 
     @Test
+    public void testDropSystemTable() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+
+        try {
+            try {
+                conn.createStatement().executeUpdate(
+                        "DROP TABLE " + PhoenixDatabaseMetaData.SYSTEM_CATALOG);
+                fail("Should not be allowed to drop a system table");
+            } catch (SQLException e) {
+                assertEquals(SQLExceptionCode.CANNOT_MUTATE_TABLE.getErrorCode(), e.getErrorCode());
+            }
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
     public void testAddVarCharColToPK() throws Exception {
         Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
         Connection conn = DriverManager.getConnection(getUrl(), props);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/75773839/phoenix-core/src/main/antlr3/PhoenixSQL.g
--
diff --git a/phoenix-core/src/main/antlr3/PhoenixSQL.g b/phoenix-core/src/main/antlr3/PhoenixSQL.g
index 42b8a98..6264648 100644
--- a/phoenix-core/src/main/antlr3/PhoenixSQL.g
+++ b/phoenix-core/src/main/antlr3/PhoenixSQL.g
@@ -471,7 +471,7 @@ column_names returns [List<String> ret]
 // Parse a drop table statement.
 drop_table_node returns [DropTableStatement ret]
     :   DROP (v=VIEW | TABLE) (IF ex=EXISTS)? t=from_table_name (c=CASCADE)?
-        {ret = factory.dropTable(t, v==null ? PTableType.TABLE : PTableType.VIEW, ex!=null, c!=null); }
+        {ret = factory.dropTable(t, v==null ? (QueryConstants.SYSTEM_SCHEMA_NAME.equals(t.getSchemaName()) ? PTableType.SYSTEM : PTableType.TABLE) : PTableType.VIEW, ex!=null, c!=null); }
     ;
 
 // Parse a drop index statement



[2/2] git commit: Merge branch '3.0' of https://git-wip-us.apache.org/repos/asf/phoenix into 3.0

2014-09-21 jamestaylor
Merge branch '3.0' of https://git-wip-us.apache.org/repos/asf/phoenix into 3.0


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d8766cf7
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d8766cf7
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d8766cf7

Branch: refs/heads/3.0
Commit: d8766cf77cd76d6e5977f3fef812d9bbea0a73cd
Parents: ba81f7c 8eaf08e
Author: James Taylor 
Authored: Sun Sep 21 13:44:31 2014 -0700
Committer: James Taylor 
Committed: Sun Sep 21 13:44:31 2014 -0700

--
 .../phoenix/pig/PhoenixHBaseLoaderIT.java   |  56 +
 .../phoenix/pig/PhoenixHBaseStorerIT.java   | 115 +++
 .../phoenix/pig/hadoop/PhoenixInputFormat.java  |  21 +---
 .../phoenix/pig/hadoop/PhoenixRecordReader.java |  11 +-
 .../org/apache/phoenix/pig/util/TypeUtil.java   |  48 
 5 files changed, 206 insertions(+), 45 deletions(-)
--




[1/2] git commit: PHOENIX-1268 Misleading exception when attempting to drop system table

2014-09-21 jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/3.0 8eaf08e09 -> d8766cf77


PHOENIX-1268 Misleading exception when attempting to drop system table


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ba81f7c2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ba81f7c2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ba81f7c2

Branch: refs/heads/3.0
Commit: ba81f7c2561343c9c5e4dede0407aa588c13835a
Parents: 48a2482
Author: James Taylor 
Authored: Sun Sep 21 13:44:12 2014 -0700
Committer: James Taylor 
Committed: Sun Sep 21 13:44:12 2014 -0700

--
 .../org/apache/phoenix/end2end/AlterTableIT.java  | 18 ++
 phoenix-core/src/main/antlr3/PhoenixSQL.g |  2 +-
 2 files changed, 19 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ba81f7c2/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
index ecdee66..ee96304 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java
@@ -108,6 +108,24 @@ public class AlterTableIT extends BaseHBaseManagedTimeIT {
     }
 
     @Test
+    public void testDropSystemTable() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+
+        try {
+            try {
+                conn.createStatement().executeUpdate(
+                        "DROP TABLE " + PhoenixDatabaseMetaData.SYSTEM_CATALOG);
+                fail("Should not be allowed to drop a system table");
+            } catch (SQLException e) {
+                assertEquals(SQLExceptionCode.CANNOT_MUTATE_TABLE.getErrorCode(), e.getErrorCode());
+            }
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
     public void testAddVarCharColToPK() throws Exception {
         Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
         Connection conn = DriverManager.getConnection(getUrl(), props);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ba81f7c2/phoenix-core/src/main/antlr3/PhoenixSQL.g
--
diff --git a/phoenix-core/src/main/antlr3/PhoenixSQL.g b/phoenix-core/src/main/antlr3/PhoenixSQL.g
index 6cc8d57..a900628 100644
--- a/phoenix-core/src/main/antlr3/PhoenixSQL.g
+++ b/phoenix-core/src/main/antlr3/PhoenixSQL.g
@@ -471,7 +471,7 @@ column_names returns [List<String> ret]
 // Parse a drop table statement.
 drop_table_node returns [DropTableStatement ret]
     :   DROP (v=VIEW | TABLE) (IF ex=EXISTS)? t=from_table_name (c=CASCADE)?
-        {ret = factory.dropTable(t, v==null ? PTableType.TABLE : PTableType.VIEW, ex!=null, c!=null); }
+        {ret = factory.dropTable(t, v==null ? (QueryConstants.SYSTEM_SCHEMA_NAME.equals(t.getSchemaName()) ? PTableType.SYSTEM : PTableType.TABLE) : PTableType.VIEW, ex!=null, c!=null); }
     ;
 
 // Parse a drop index statement



Apache-Phoenix | 3.0 | Hadoop1 | Build Successful

2014-09-21 Apache Jenkins Server
3.0 branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastCompletedBuild/testReport/

Changes
[maghamravikiran] Phoenix 1044 : Changes to PhoenixRecordReader to fix scan.

[maghamravikiran] Phoenix 1050 : Support for DataByteArray



Apache-Phoenix | Master | Hadoop1 | Build Successful

2014-09-21 Apache Jenkins Server
Master branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-master-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-master-hadoop1/lastCompletedBuild/testReport/

Changes
[maghamravikiran] Phoenix-1044 : Changes to PhoenixRecordReader to fix the scan.

[maghamravikiran] Phoenix 1050 : Support for DataByteArray.



Apache-Phoenix | 4.0 | Hadoop1 | Build Successful

2014-09-21 Apache Jenkins Server
4.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.0-hadoop1/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.0-hadoop1/lastCompletedBuild/testReport/

Changes
[maghamravikiran] Phoenix 1040 : Changes to PhoenixRecordReader to fix the scan

[maghamravikiran] Phoenix 1050 : Support for DataByteArray



[2/2] git commit: Phoenix 1050 : Support for DataByteArray

2014-09-21 ravimagham
Phoenix 1050 : Support for DataByteArray


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8eaf08e0
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8eaf08e0
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8eaf08e0

Branch: refs/heads/3.0
Commit: 8eaf08e0999a7407d9f33fd0771935c01e4edcd5
Parents: b9da2dc
Author: mravi 
Authored: Sun Sep 21 10:33:01 2014 -0700
Committer: mravi 
Committed: Sun Sep 21 10:33:01 2014 -0700

--
 .../phoenix/pig/PhoenixHBaseStorerIT.java   | 115 +++
 .../org/apache/phoenix/pig/util/TypeUtil.java   |  48 
 2 files changed, 139 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8eaf08e0/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
--
diff --git a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
index 1d82362..e0021d9 100644
--- a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
+++ b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
@@ -23,6 +23,7 @@ import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
 import static org.apache.phoenix.util.TestUtil.LOCALHOST;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertArrayEquals;
 
 import java.sql.Connection;
 import java.sql.DriverManager;
@@ -30,6 +31,7 @@ import java.sql.ResultSet;
 import java.sql.Statement;
 import java.util.Collection;
 
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
 import org.apache.phoenix.end2end.HBaseManagedTimeTest;
 import org.apache.pig.ExecType;
@@ -37,8 +39,10 @@ import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecJob.JOB_STATUS;
 import org.apache.pig.builtin.mock.Storage;
 import org.apache.pig.builtin.mock.Storage.Data;
+import org.apache.pig.data.DataByteArray;
 import org.apache.pig.data.Tuple;
 import org.apache.pig.data.TupleFactory;
+import org.joda.time.DateTime;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -182,4 +186,115 @@ public class PhoenixHBaseStorerIT extends BaseHBaseManagedTimeIT {
             assertEquals(0, rs.getInt(3));
         }
     }
+
+    /**
+     * Test storage of DataByteArray columns to Phoenix
+     * Maps the DataByteArray with the target PhoenixDataType and persists in HBase.
+     * @throws Exception
+     */
+    @Test
+    public void testStoreWithBinaryDataTypes() throws Exception {
+
+        final String tableName = "TABLE3";
+        final Statement stmt = conn.createStatement();
+
+        stmt.execute("CREATE TABLE " + tableName +
+                " (col1 BIGINT NOT NULL, col2 INTEGER , col3 FLOAT, col4 DOUBLE , col5 TINYINT , " +
+                "  col6 BOOLEAN , col7 VARBINARY CONSTRAINT my_pk PRIMARY KEY (col1))");
+
+        final Data data = Storage.resetData(pigServer);
+        final Collection<Tuple> list = Lists.newArrayList();
+
+        int rows = 10;
+        for (int i = 1; i <= rows; i++) {
+            Tuple t = tupleFactory.newTuple();
+            t.append(i);
+            t.append(new DataByteArray(Bytes.toBytes(i * 5)));
+            t.append(new DataByteArray(Bytes.toBytes(i * 10.0F)));
+            t.append(new DataByteArray(Bytes.toBytes(i * 15.0D)));
+            t.append(new DataByteArray(Bytes.toBytes(i)));
+            t.append(new DataByteArray(Bytes.toBytes( i % 2 == 0)));
+            t.append(new DataByteArray(Bytes.toBytes(i)));
+            list.add(t);
+        }
+        data.set("in", "col1:int,col2:bytearray,col3:bytearray,col4:bytearray,col5:bytearray,col6:bytearray,col7:bytearray ", list);
+
+        pigServer.setBatchOn();
+        pigServer.registerQuery("A = LOAD 'in' USING mock.Storage();");
+
+        pigServer.registerQuery("Store A into 'hbase://" + tableName
+                + "' using " + PhoenixHBaseStorage.class.getName() + "('"
+                + zkQuorum + "', '-batchSize 1000');");
+
+        if (pigServer.executeBatch().get(0).getStatus() != JOB_STATUS.COMPLETED) {
+            throw new RuntimeException("Job failed", pigServer.executeBatch()
+                    .get(0).getException());
+        }
+
+        final ResultSet rs = stmt
+                .executeQuery(String.format("SELECT col1 , col2 , col3 , col4 , col5 , col6, col7  FROM %s ORDER BY col1" , tableName));
+
+        int count = 0;
+        for (int i = 1; i <= rows; i++) {
+            assertTrue(rs.next());
+            assertEquals(i, rs.getInt(1

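The (truncated) testStoreWithBinaryDataTypes above hand-encodes every non-key column with HBase's Bytes utility and wraps it in a Pig DataByteArray; judging from the diffstat and the test's javadoc, TypeUtil is where those raw bytes are mapped back onto the declared Phoenix column types. Below is a minimal, self-contained sketch of the encoding side only, pulled out of the test for clarity. The class and method names are illustrative, not from the patch; the column comments refer to the CREATE TABLE statement in the test.

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;

// Illustrative helper: builds one row shaped like the tuples the test feeds
// to PhoenixHBaseStorage, serializing each value with Bytes.toBytes() and
// wrapping it in a DataByteArray (the Pig schema declares them as bytearray).
final class BinaryRowSketch {
    static Tuple makeRow(int i) {
        Tuple t = TupleFactory.getInstance().newTuple();
        t.append(i);                                             // col1 BIGINT (primary key)
        t.append(new DataByteArray(Bytes.toBytes(i * 5)));       // col2 INTEGER
        t.append(new DataByteArray(Bytes.toBytes(i * 10.0F)));   // col3 FLOAT
        t.append(new DataByteArray(Bytes.toBytes(i * 15.0D)));   // col4 DOUBLE
        t.append(new DataByteArray(Bytes.toBytes(i)));           // col5 TINYINT
        t.append(new DataByteArray(Bytes.toBytes(i % 2 == 0)));  // col6 BOOLEAN
        t.append(new DataByteArray(Bytes.toBytes(i)));           // col7 VARBINARY
        return t;
    }
}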
[1/2] git commit: Phoenix 1044 : Changes to PhoenixRecordReader to fix scan.

2014-09-21 ravimagham
Repository: phoenix
Updated Branches:
  refs/heads/3.0 48a2482b4 -> 8eaf08e09


Phoenix 1044 : Changes to PhoenixRecordReader to fix scan.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b9da2dc6
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b9da2dc6
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b9da2dc6

Branch: refs/heads/3.0
Commit: b9da2dc623799d386218a59119fc97ea1feb24b9
Parents: 48a2482
Author: mravi 
Authored: Sun Sep 21 10:32:33 2014 -0700
Committer: mravi 
Committed: Sun Sep 21 10:32:33 2014 -0700

--
 .../phoenix/pig/PhoenixHBaseLoaderIT.java   | 56 
 .../phoenix/pig/hadoop/PhoenixInputFormat.java  | 21 +++-
 .../phoenix/pig/hadoop/PhoenixRecordReader.java | 11 ++--
 3 files changed, 67 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/b9da2dc6/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
--
diff --git a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
index 8daea9c..6017065 100644
--- a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
+++ b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
@@ -307,6 +307,62 @@ public class PhoenixHBaseLoaderIT {
     }
 
     /**
+     *
+     * @throws Exception
+     */
+    @Test
+    public void testForNonPKSQLQuery() throws Exception {
+
+        //create the table
+        String ddl = "CREATE TABLE  " + TABLE_FULL_NAME
+                + " ( ID VARCHAR PRIMARY KEY, FOO VARCHAR, BAR INTEGER, BAZ UNSIGNED_INT)";
+
+        conn.createStatement().execute(ddl);
+
+        //upsert data.
+        final String dml = "UPSERT INTO " + TABLE_FULL_NAME + " VALUES(?,?,?,?) ";
+        PreparedStatement stmt = conn.prepareStatement(dml);
+        stmt.setString(1, "a");
+        stmt.setString(2, "a");
+        stmt.setInt(3,-1);
+        stmt.setInt(4,1);
+        stmt.execute();
+        stmt.setString(1, "b");
+        stmt.setString(2, "b");
+        stmt.setInt(3,-2);
+        stmt.setInt(4,2);
+        stmt.execute();
+
+        conn.commit();
+
+        //sql query
+        final String sqlQuery = String.format(" SELECT FOO, BAZ FROM %s WHERE BAR = -1 " , TABLE_FULL_NAME);
+
+        pigServer.registerQuery(String.format(
+                "A = load 'hbase://query/%s' using org.apache.phoenix.pig.PhoenixHBaseLoader('%s');", sqlQuery,
+                zkQuorum));
+
+        final Iterator<Tuple> iterator = pigServer.openIterator("A");
+        int recordsRead = 0;
+        while (iterator.hasNext()) {
+            final Tuple tuple = iterator.next();
+            assertEquals("a", tuple.get(0));
+            assertEquals(1, tuple.get(1));
+            recordsRead++;
+        }
+        assertEquals(1, recordsRead);
+
+        //test the schema. Test for PHOENIX-1123
+        Schema schema = pigServer.dumpSchema("A");
+        List<FieldSchema> fields = schema.getFields();
+        assertEquals(2, fields.size());
+        assertTrue(fields.get(0).alias.equalsIgnoreCase("FOO"));
+        assertTrue(fields.get(0).type == DataType.CHARARRAY);
+        assertTrue(fields.get(1).alias.equalsIgnoreCase("BAZ"));
+        assertTrue(fields.get(1).type == DataType.INTEGER);
+    }
+
+    /**
      * @throws Exception
      */
     @Test

http://git-wip-us.apache.org/repos/asf/phoenix/blob/b9da2dc6/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
--
diff --git a/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java b/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
index ebb9023..4326876 100644
--- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
+++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.phoenix.compile.QueryPlan;
 import org.apache.phoenix.compile.StatementContext;
-import org.apache.phoenix.iterate.ResultIterator;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.pig.PhoenixPigConfiguration;
 import org.apache.phoenix.query.KeyRange;
@@ -46,7 +45,6 @@ import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.util.ScanUtil;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
 import com.google.common.collect.Lists;
 
 /**

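The new testForNonPKSQLQuery above drives PhoenixHBaseLoader with a full SQL statement (the 'hbase://query/...' location) that filters on a non-PK column, which is the path the PhoenixInputFormat/PhoenixRecordReader fix exercises. As a hedged cross-check only: the same statement issued over plain JDBC should return the single row the Pig assertions expect. The connection URL and table name below are placeholders, not values from the patch.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;

// Hedged sketch: run the test's non-PK-filtered query directly through the
// Phoenix JDBC driver. "jdbc:phoenix:localhost" and the table name T stand
// in for the test's zkQuorum and TABLE_FULL_NAME.
public class NonPkQuerySketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
            ResultSet rs = conn.createStatement().executeQuery(
                    "SELECT FOO, BAZ FROM T WHERE BAR = -1");
            while (rs.next()) {
                // The test expects exactly one matching row: FOO = 'a', BAZ = 1.
                System.out.println(rs.getString("FOO") + " / " + rs.getInt("BAZ"));
            }
        }
    }
}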
[2/2] git commit: Phoenix 1050 : Support for DataByteArray

2014-09-21 ravimagham
Phoenix 1050 : Support for DataByteArray


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2ccb62d1
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2ccb62d1
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2ccb62d1

Branch: refs/heads/4.0
Commit: 2ccb62d181afb3e54fe4940f78490f858b9a710a
Parents: 27b3865
Author: mravi 
Authored: Sun Sep 21 10:27:51 2014 -0700
Committer: mravi 
Committed: Sun Sep 21 10:27:51 2014 -0700

--
 .../phoenix/pig/PhoenixHBaseStorerIT.java   | 115 +++
 .../org/apache/phoenix/pig/util/TypeUtil.java   |  48 
 2 files changed, 139 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2ccb62d1/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
--
diff --git a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
index 1d82362..e0021d9 100644
--- a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
+++ b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
@@ -23,6 +23,7 @@ import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
 import static org.apache.phoenix.util.TestUtil.LOCALHOST;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertArrayEquals;
 
 import java.sql.Connection;
 import java.sql.DriverManager;
@@ -30,6 +31,7 @@ import java.sql.ResultSet;
 import java.sql.Statement;
 import java.util.Collection;
 
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
 import org.apache.phoenix.end2end.HBaseManagedTimeTest;
 import org.apache.pig.ExecType;
@@ -37,8 +39,10 @@ import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecJob.JOB_STATUS;
 import org.apache.pig.builtin.mock.Storage;
 import org.apache.pig.builtin.mock.Storage.Data;
+import org.apache.pig.data.DataByteArray;
 import org.apache.pig.data.Tuple;
 import org.apache.pig.data.TupleFactory;
+import org.joda.time.DateTime;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -182,4 +186,115 @@ public class PhoenixHBaseStorerIT extends BaseHBaseManagedTimeIT {
             assertEquals(0, rs.getInt(3));
         }
     }
+
+    /**
+     * Test storage of DataByteArray columns to Phoenix
+     * Maps the DataByteArray with the target PhoenixDataType and persists in HBase.
+     * @throws Exception
+     */
+    @Test
+    public void testStoreWithBinaryDataTypes() throws Exception {
+
+        final String tableName = "TABLE3";
+        final Statement stmt = conn.createStatement();
+
+        stmt.execute("CREATE TABLE " + tableName +
+                " (col1 BIGINT NOT NULL, col2 INTEGER , col3 FLOAT, col4 DOUBLE , col5 TINYINT , " +
+                "  col6 BOOLEAN , col7 VARBINARY CONSTRAINT my_pk PRIMARY KEY (col1))");
+
+        final Data data = Storage.resetData(pigServer);
+        final Collection<Tuple> list = Lists.newArrayList();
+
+        int rows = 10;
+        for (int i = 1; i <= rows; i++) {
+            Tuple t = tupleFactory.newTuple();
+            t.append(i);
+            t.append(new DataByteArray(Bytes.toBytes(i * 5)));
+            t.append(new DataByteArray(Bytes.toBytes(i * 10.0F)));
+            t.append(new DataByteArray(Bytes.toBytes(i * 15.0D)));
+            t.append(new DataByteArray(Bytes.toBytes(i)));
+            t.append(new DataByteArray(Bytes.toBytes( i % 2 == 0)));
+            t.append(new DataByteArray(Bytes.toBytes(i)));
+            list.add(t);
+        }
+        data.set("in", "col1:int,col2:bytearray,col3:bytearray,col4:bytearray,col5:bytearray,col6:bytearray,col7:bytearray ", list);
+
+        pigServer.setBatchOn();
+        pigServer.registerQuery("A = LOAD 'in' USING mock.Storage();");
+
+        pigServer.registerQuery("Store A into 'hbase://" + tableName
+                + "' using " + PhoenixHBaseStorage.class.getName() + "('"
+                + zkQuorum + "', '-batchSize 1000');");
+
+        if (pigServer.executeBatch().get(0).getStatus() != JOB_STATUS.COMPLETED) {
+            throw new RuntimeException("Job failed", pigServer.executeBatch()
+                    .get(0).getException());
+        }
+
+        final ResultSet rs = stmt
+                .executeQuery(String.format("SELECT col1 , col2 , col3 , col4 , col5 , col6, col7  FROM %s ORDER BY col1" , tableName));
+
+        int count = 0;
+        for (int i = 1; i <= rows; i++) {
+            assertTrue(rs.next());
+            assertEquals(i, rs.getInt(1

[1/2] git commit: Phoenix 1040 : Changes to PhoenixRecordReader to fix the scan

2014-09-21 ravimagham
Repository: phoenix
Updated Branches:
  refs/heads/4.0 987acf6d1 -> 2ccb62d18


Phoenix 1040 : Changes to PhoenixRecordReader to fix the scan


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/27b3865c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/27b3865c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/27b3865c

Branch: refs/heads/4.0
Commit: 27b3865c27b928d229bfb92d7f2b5eece0188321
Parents: 987acf6
Author: mravi 
Authored: Sun Sep 21 10:27:11 2014 -0700
Committer: mravi 
Committed: Sun Sep 21 10:27:11 2014 -0700

--
 .../phoenix/pig/PhoenixHBaseLoaderIT.java   | 57 
 .../phoenix/pig/hadoop/PhoenixInputFormat.java  | 19 ++-
 .../phoenix/pig/hadoop/PhoenixRecordReader.java |  7 ++-
 3 files changed, 65 insertions(+), 18 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/27b3865c/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
--
diff --git a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
index 8daea9c..d82e6b0 100644
--- a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
+++ b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
@@ -307,6 +307,63 @@ public class PhoenixHBaseLoaderIT {
     }
 
     /**
+     *
+     * @throws Exception
+     */
+    @Test
+    public void testForNonPKSQLQuery() throws Exception {
+
+        //create the table
+        String ddl = "CREATE TABLE  " + TABLE_FULL_NAME
+                + " ( ID VARCHAR PRIMARY KEY, FOO VARCHAR, BAR INTEGER, BAZ UNSIGNED_INT)";
+
+        conn.createStatement().execute(ddl);
+
+        //upsert data.
+        final String dml = "UPSERT INTO " + TABLE_FULL_NAME + " VALUES(?,?,?,?) ";
+        PreparedStatement stmt = conn.prepareStatement(dml);
+        stmt.setString(1, "a");
+        stmt.setString(2, "a");
+        stmt.setInt(3,-1);
+        stmt.setInt(4,1);
+        stmt.execute();
+
+        stmt.setString(1, "b");
+        stmt.setString(2, "b");
+        stmt.setInt(3,-2);
+        stmt.setInt(4,2);
+        stmt.execute();
+
+        conn.commit();
+
+        //sql query
+        final String sqlQuery = String.format(" SELECT FOO, BAZ FROM %s WHERE BAR = -1 " , TABLE_FULL_NAME);
+
+        pigServer.registerQuery(String.format(
+                "A = load 'hbase://query/%s' using org.apache.phoenix.pig.PhoenixHBaseLoader('%s');", sqlQuery,
+                zkQuorum));
+
+        final Iterator<Tuple> iterator = pigServer.openIterator("A");
+        int recordsRead = 0;
+        while (iterator.hasNext()) {
+            final Tuple tuple = iterator.next();
+            assertEquals("a", tuple.get(0));
+            assertEquals(1, tuple.get(1));
+            recordsRead++;
+        }
+        assertEquals(1, recordsRead);
+
+        //test the schema. Test for PHOENIX-1123
+        Schema schema = pigServer.dumpSchema("A");
+        List<FieldSchema> fields = schema.getFields();
+        assertEquals(2, fields.size());
+        assertTrue(fields.get(0).alias.equalsIgnoreCase("FOO"));
+        assertTrue(fields.get(0).type == DataType.CHARARRAY);
+        assertTrue(fields.get(1).alias.equalsIgnoreCase("BAZ"));
+        assertTrue(fields.get(1).type == DataType.INTEGER);
+    }
+
+    /**
      * @throws Exception
      */
     @Test

http://git-wip-us.apache.org/repos/asf/phoenix/blob/27b3865c/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
--
diff --git a/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java b/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
index ebb9023..6899099 100644
--- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
+++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.phoenix.compile.QueryPlan;
 import org.apache.phoenix.compile.StatementContext;
-import org.apache.phoenix.iterate.ResultIterator;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.pig.PhoenixPigConfiguration;
 import org.apache.phoenix.query.KeyRange;
@@ -46,7 +45,6 @@ import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.util.ScanUtil;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
 import com.google.common.collect.List

[2/2] git commit: Phoenix 1050 : Support for DataByteArray.

2014-09-21 ravimagham
Phoenix 1050 : Support for DataByteArray.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/598bc323
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/598bc323
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/598bc323

Branch: refs/heads/master
Commit: 598bc323f49c8d0413c45019fafcbe94036cf67a
Parents: f2c9bba
Author: mravi 
Authored: Sun Sep 21 10:17:26 2014 -0700
Committer: mravi 
Committed: Sun Sep 21 10:17:26 2014 -0700

--
 .../phoenix/pig/PhoenixHBaseStorerIT.java   | 115 +++
 .../org/apache/phoenix/pig/util/TypeUtil.java   |  48 
 2 files changed, 139 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/598bc323/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
--
diff --git a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
index 1d82362..e0021d9 100644
--- a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
+++ b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseStorerIT.java
@@ -23,6 +23,7 @@ import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
 import static org.apache.phoenix.util.TestUtil.LOCALHOST;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertArrayEquals;
 
 import java.sql.Connection;
 import java.sql.DriverManager;
@@ -30,6 +31,7 @@ import java.sql.ResultSet;
 import java.sql.Statement;
 import java.util.Collection;
 
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
 import org.apache.phoenix.end2end.HBaseManagedTimeTest;
 import org.apache.pig.ExecType;
@@ -37,8 +39,10 @@ import org.apache.pig.PigServer;
 import org.apache.pig.backend.executionengine.ExecJob.JOB_STATUS;
 import org.apache.pig.builtin.mock.Storage;
 import org.apache.pig.builtin.mock.Storage.Data;
+import org.apache.pig.data.DataByteArray;
 import org.apache.pig.data.Tuple;
 import org.apache.pig.data.TupleFactory;
+import org.joda.time.DateTime;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -182,4 +186,115 @@ public class PhoenixHBaseStorerIT extends BaseHBaseManagedTimeIT {
             assertEquals(0, rs.getInt(3));
         }
     }
+
+    /**
+     * Test storage of DataByteArray columns to Phoenix
+     * Maps the DataByteArray with the target PhoenixDataType and persists in HBase.
+     * @throws Exception
+     */
+    @Test
+    public void testStoreWithBinaryDataTypes() throws Exception {
+
+        final String tableName = "TABLE3";
+        final Statement stmt = conn.createStatement();
+
+        stmt.execute("CREATE TABLE " + tableName +
+                " (col1 BIGINT NOT NULL, col2 INTEGER , col3 FLOAT, col4 DOUBLE , col5 TINYINT , " +
+                "  col6 BOOLEAN , col7 VARBINARY CONSTRAINT my_pk PRIMARY KEY (col1))");
+
+        final Data data = Storage.resetData(pigServer);
+        final Collection<Tuple> list = Lists.newArrayList();
+
+        int rows = 10;
+        for (int i = 1; i <= rows; i++) {
+            Tuple t = tupleFactory.newTuple();
+            t.append(i);
+            t.append(new DataByteArray(Bytes.toBytes(i * 5)));
+            t.append(new DataByteArray(Bytes.toBytes(i * 10.0F)));
+            t.append(new DataByteArray(Bytes.toBytes(i * 15.0D)));
+            t.append(new DataByteArray(Bytes.toBytes(i)));
+            t.append(new DataByteArray(Bytes.toBytes( i % 2 == 0)));
+            t.append(new DataByteArray(Bytes.toBytes(i)));
+            list.add(t);
+        }
+        data.set("in", "col1:int,col2:bytearray,col3:bytearray,col4:bytearray,col5:bytearray,col6:bytearray,col7:bytearray ", list);
+
+        pigServer.setBatchOn();
+        pigServer.registerQuery("A = LOAD 'in' USING mock.Storage();");
+
+        pigServer.registerQuery("Store A into 'hbase://" + tableName
+                + "' using " + PhoenixHBaseStorage.class.getName() + "('"
+                + zkQuorum + "', '-batchSize 1000');");
+
+        if (pigServer.executeBatch().get(0).getStatus() != JOB_STATUS.COMPLETED) {
+            throw new RuntimeException("Job failed", pigServer.executeBatch()
+                    .get(0).getException());
+        }
+
+        final ResultSet rs = stmt
+                .executeQuery(String.format("SELECT col1 , col2 , col3 , col4 , col5 , col6, col7  FROM %s ORDER BY col1" , tableName));
+
+        int count = 0;
+        for (int i = 1; i <= rows; i++) {
+            assertTrue(rs.next());
+            assertEquals(i, rs.getI

[1/2] git commit: Phoenix-1044 : Changes to PhoenixRecordReader to fix the scan.

2014-09-21 ravimagham
Repository: phoenix
Updated Branches:
  refs/heads/master 2fe9be740 -> 598bc323f


Phoenix-1044 : Changes to PhoenixRecordReader to fix the scan.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f2c9bbab
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f2c9bbab
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f2c9bbab

Branch: refs/heads/master
Commit: f2c9bbab0bd2297a06a16962c8641b50c3472eef
Parents: 2fe9be7
Author: mravi 
Authored: Sun Sep 21 10:15:19 2014 -0700
Committer: mravi 
Committed: Sun Sep 21 10:15:19 2014 -0700

--
 .../phoenix/pig/PhoenixHBaseLoaderIT.java   | 56 
 .../phoenix/pig/hadoop/PhoenixInputFormat.java  | 21 +++-
 .../phoenix/pig/hadoop/PhoenixRecordReader.java | 11 ++--
 3 files changed, 67 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f2c9bbab/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
--
diff --git a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
index 8daea9c..6017065 100644
--- a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
+++ b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
@@ -307,6 +307,62 @@ public class PhoenixHBaseLoaderIT {
     }
 
     /**
+     *
+     * @throws Exception
+     */
+    @Test
+    public void testForNonPKSQLQuery() throws Exception {
+
+        //create the table
+        String ddl = "CREATE TABLE  " + TABLE_FULL_NAME
+                + " ( ID VARCHAR PRIMARY KEY, FOO VARCHAR, BAR INTEGER, BAZ UNSIGNED_INT)";
+
+        conn.createStatement().execute(ddl);
+
+        //upsert data.
+        final String dml = "UPSERT INTO " + TABLE_FULL_NAME + " VALUES(?,?,?,?) ";
+        PreparedStatement stmt = conn.prepareStatement(dml);
+        stmt.setString(1, "a");
+        stmt.setString(2, "a");
+        stmt.setInt(3,-1);
+        stmt.setInt(4,1);
+        stmt.execute();
+        stmt.setString(1, "b");
+        stmt.setString(2, "b");
+        stmt.setInt(3,-2);
+        stmt.setInt(4,2);
+        stmt.execute();
+
+        conn.commit();
+
+        //sql query
+        final String sqlQuery = String.format(" SELECT FOO, BAZ FROM %s WHERE BAR = -1 " , TABLE_FULL_NAME);
+
+        pigServer.registerQuery(String.format(
+                "A = load 'hbase://query/%s' using org.apache.phoenix.pig.PhoenixHBaseLoader('%s');", sqlQuery,
+                zkQuorum));
+
+        final Iterator<Tuple> iterator = pigServer.openIterator("A");
+        int recordsRead = 0;
+        while (iterator.hasNext()) {
+            final Tuple tuple = iterator.next();
+            assertEquals("a", tuple.get(0));
+            assertEquals(1, tuple.get(1));
+            recordsRead++;
+        }
+        assertEquals(1, recordsRead);
+
+        //test the schema. Test for PHOENIX-1123
+        Schema schema = pigServer.dumpSchema("A");
+        List<FieldSchema> fields = schema.getFields();
+        assertEquals(2, fields.size());
+        assertTrue(fields.get(0).alias.equalsIgnoreCase("FOO"));
+        assertTrue(fields.get(0).type == DataType.CHARARRAY);
+        assertTrue(fields.get(1).alias.equalsIgnoreCase("BAZ"));
+        assertTrue(fields.get(1).type == DataType.INTEGER);
+    }
+
+    /**
      * @throws Exception
      */
     @Test

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f2c9bbab/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
--
diff --git a/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java b/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
index ebb9023..4326876 100644
--- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
+++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.phoenix.compile.QueryPlan;
 import org.apache.phoenix.compile.StatementContext;
-import org.apache.phoenix.iterate.ResultIterator;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.pig.PhoenixPigConfiguration;
 import org.apache.phoenix.query.KeyRange;
@@ -46,7 +45,6 @@ import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.util.ScanUtil;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
 import com.google.common.collect.List