git commit: PHOENIX-1337 Unpadded fixed length tenant ID causes erroneous results (James Taylor via Ram)

2014-10-13 Thread ramkrishna
Repository: phoenix
Updated Branches:
  refs/heads/3.0 4c8798d57 -> ed3e3f55f


PHOENIX-1337 Unpadded fixed length tenant ID causes erroneous results
(James Taylor via Ram)
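
For context: the tenant ID is a fixed-width CHAR(15) row key column, so the stored key holds the value padded out to 15 bytes, while an unpadded literal such as "org1" covers only 4 bytes and can produce scan boundaries that miss or mis-match rows. A rough sketch of the padding involved, assuming space padding for CHAR purely as an illustration (the helper class below is hypothetical, not Phoenix code):

// Illustrative helper mirroring the idea behind the fix for fixed-width CHAR
// row key components: right-pad the supplied value to its declared length.
public final class CharPaddingSketch {
    static String padTo(String value, int maxLength) {
        StringBuilder sb = new StringBuilder(maxLength).append(value);
        while (sb.length() < maxLength) {
            sb.append(' '); // assumed space padding for CHAR, for illustration only
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        String tenantId = "org1";             // what the client supplies
        String stored = padTo(tenantId, 15);  // what the CHAR(15) row key component holds
        // Building key ranges from the unpadded "org1" instead of the padded value
        // is the kind of mismatch that produced the erroneous results fixed here.
        System.out.println("[" + stored + "] length=" + stored.length());
    }
}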


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ed3e3f55
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ed3e3f55
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ed3e3f55

Branch: refs/heads/3.0
Commit: ed3e3f55f1e9d91cec8a02882ae4c61ced9e49f0
Parents: 4c8798d
Author: Ramkrishna 
Authored: Mon Oct 13 12:53:26 2014 +0530
Committer: Ramkrishna 
Committed: Mon Oct 13 12:53:26 2014 +0530

--
 .../end2end/TenantSpecificViewIndexIT.java  | 44 
 .../apache/phoenix/compile/DeleteCompiler.java  | 15 +--
 .../phoenix/compile/ProjectionCompiler.java |  2 +-
 .../apache/phoenix/compile/UpsertCompiler.java  | 10 +++--
 .../apache/phoenix/compile/WhereOptimizer.java  |  2 +
 .../java/org/apache/phoenix/util/ScanUtil.java  | 19 +
 6 files changed, 84 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ed3e3f55/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificViewIndexIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificViewIndexIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificViewIndexIT.java
index 1f8eb55..e7cdc01 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificViewIndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificViewIndexIT.java
@@ -24,6 +24,7 @@ import static org.junit.Assert.fail;
 
 import java.sql.Connection;
 import java.sql.DriverManager;
+import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.util.Properties;
 
@@ -121,4 +122,47 @@ public class TenantSpecificViewIndexIT extends BaseTenantSpecificViewIndexIT {
 assertFalse(rs.next());
 
 }
+
+@Test
+public void testQueryingUsingTenantSpecific() throws Exception {
+String tenantId1 = "org1";
+String tenantId2 = "org2";
+String ddl = "CREATE TABLE T (tenantId char(15) NOT NULL, pk1 varchar NOT NULL, pk2 INTEGER NOT NULL, val1 VARCHAR CONSTRAINT pk primary key (tenantId,pk1,pk2)) MULTI_TENANT = true";
+Connection conn = DriverManager.getConnection(getUrl());
+conn.createStatement().execute(ddl);
+String dml = "UPSERT INTO T (tenantId, pk1, pk2, val1) VALUES (?, ?, ?, ?)";
+PreparedStatement stmt = conn.prepareStatement(dml);
+
+String pk = "pk1b";
+// insert two rows in table T. One for tenantId1 and other for tenantId2.
+stmt.setString(1, tenantId1);
+stmt.setString(2, pk);
+stmt.setInt(3, 100);
+stmt.setString(4, "value1");
+stmt.executeUpdate();
+
+stmt.setString(1, tenantId2);
+stmt.setString(2, pk);
+stmt.setInt(3, 200);
+stmt.setString(4, "value2");
+stmt.executeUpdate();
+conn.commit();
+conn.close();
+
+// get a tenant specific url.
+String tenantUrl = getUrl() + ';' + PhoenixRuntime.TENANT_ID_ATTRIB + '=' + tenantId1;
+Connection tenantConn = DriverManager.getConnection(tenantUrl);
+
+// create a tenant specific view.
+tenantConn.createStatement().execute("CREATE VIEW V AS select * from T");
+String query = "SELECT val1 FROM V WHERE pk1 = ?";
+
+// using the tenant connection query the view.
+PreparedStatement stmt2 = tenantConn.prepareStatement(query);
+stmt2.setString(1, pk); // for tenantId1 the row inserted has pk1 = "pk1b"
+ResultSet rs = stmt2.executeQuery();
+assertTrue(rs.next());
+assertEquals("value1", rs.getString(1));
+assertFalse("No other rows should have been returned for the tenant", rs.next()); // should have just returned one record since for org1 we have only one row.
+}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ed3e3f55/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
index 469bb30..2fd5535 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
@@ -64,6 +64,7 @@ import org.apache.phoenix.schema.MetaDataClient;
 import org.apache.phoenix.schema.MetaDataEntityNotFoundException;
 import org.apache.pho

git commit: PHOENIX-1341 Exit status for PhoenixRuntime

2014-10-13 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/master a11f0d9f2 -> 656acefd1


PHOENIX-1341 Exit status for PhoenixRuntime

Return a non-zero exit status on a failure in PhoenixRuntime.
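
The change itself is small: track an exit status, flip it to non-zero on any Throwable, and hand it to System.exit() in the finally block. A condensed sketch of that flow, with illustrative names rather than the actual PhoenixRuntime code:

// Condensed illustration of the exit-status handling added in this commit.
public class ExitStatusSketch {
    public static void main(String[] args) {
        int exitStatus = 0; // assume success until proven otherwise
        java.sql.Connection conn = null;
        try {
            conn = java.sql.DriverManager.getConnection("jdbc:phoenix:localhost");
            // ... run the requested SQL/CSV work here ...
        } catch (Throwable t) {
            t.printStackTrace();
            exitStatus = 1; // non-zero so calling scripts can detect the failure
        } finally {
            if (conn != null) {
                try {
                    conn.close();
                } catch (Exception e) {
                    // JVM is about to exit anyway; ignore close failures
                }
            }
            System.exit(exitStatus);
        }
    }
}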

Signed-off-by: Gabriel Reid 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/656acefd
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/656acefd
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/656acefd

Branch: refs/heads/master
Commit: 656acefd146d515de108f3a757ddb0522ea783d0
Parents: a11f0d9
Author: Brian Johnson 
Authored: Sat Oct 11 14:30:37 2014 -0700
Committer: Gabriel Reid 
Committed: Mon Oct 13 09:01:41 2014 +0200

--
 .../org/apache/phoenix/util/PhoenixRuntime.java | 61 ++--
 1 file changed, 32 insertions(+), 29 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/656acefd/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
index 7de27c2..9d83a60 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
@@ -63,10 +63,10 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 
 /**
- * 
+ *
  * Collection of non JDBC compliant utility methods
  *
- * 
+ *
  * @since 0.1
  */
 public class PhoenixRuntime {
@@ -84,19 +84,19 @@ public class PhoenixRuntime {
 public final static String JDBC_PROTOCOL = "jdbc:phoenix";
 public final static char JDBC_PROTOCOL_TERMINATOR = ';';
 public final static char JDBC_PROTOCOL_SEPARATOR = ':';
-
+
 @Deprecated
 public final static String EMBEDDED_JDBC_PROTOCOL = PhoenixRuntime.JDBC_PROTOCOL + PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
-
+
 /**
  * Use this connection property to control the number of rows that are
  * batched together on an UPSERT INTO table1... SELECT ... FROM table2.
  * It's only used when autoCommit is true and your source table is
- * different than your target table or your SELECT statement has a 
+ * different than your target table or your SELECT statement has a
  * GROUP BY clause.
  */
 public final static String UPSERT_BATCH_SIZE_ATTRIB = "UpsertBatchSize";
-
+
 /**
 * Use this connection property to help with fairness of resource allocation
  * for the client and server. The value of the attribute determines the
@@ -105,7 +105,7 @@ public class PhoenixRuntime {
  * configuration properties
  */
 public static final String TENANT_ID_ATTRIB = "TenantId";
-
+
 /**
 * Use this connection property prefix for annotations that you want to show up in traces and log lines emitted by Phoenix.
 * This is useful for annotating connections with information available on the client (e.g. user or session identifier) and
@@ -119,7 +119,7 @@
 * upserting data into them, and getting the uncommitted state through {@link #getUncommittedData(Connection)}
  */
 public final static String CONNECTIONLESS = "none";
-
+
 private static final String HEADER_IN_LINE = "in-line";
 private static final String SQL_FILE_EXT = ".sql";
 private static final String CSV_FILE_EXT = ".csv";
@@ -137,6 +137,8 @@ public class PhoenixRuntime {
 ExecutionCommand execCmd = ExecutionCommand.parseArgs(args);
 String jdbcUrl = JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + execCmd.getConnectionString();
 
+int exitStatus = 0;
+
 PhoenixConnection conn = null;
 try {
 Properties props = new Properties();
@@ -165,6 +167,7 @@ public class PhoenixRuntime {
 }
 } catch (Throwable t) {
 t.printStackTrace();
+exitStatus = 1;
 } finally {
 if (conn != null) {
 try {
@@ -173,7 +176,7 @@ public class PhoenixRuntime {
 //going to shut jvm down anyway. So might as well feast on it.
 }
 }
-System.exit(0);
+System.exit(exitStatus);
 }
 }
 
@@ -181,7 +184,7 @@ public class PhoenixRuntime {
 
 private PhoenixRuntime() {
 }
-
+
 /**
 * Runs a series of semicolon-terminated SQL statements using the connection provided, returning
 * the number of SQL statements executed. Note that if the connection has specified an SCN through
@@ -200,13 +203,13 @@ public class PhoenixRuntime {
 pconn.setAutoCommit(true);
 return pconn.executeStatements(reader, binds, Syst
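
As a usage note on the TenantId and UpsertBatchSize connection properties documented above, a minimal sketch of passing them at connect time; the JDBC URL, tenant value, and batch size are placeholders:

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

public class ConnectionPropsSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Keys match the constants above: TENANT_ID_ATTRIB and UPSERT_BATCH_SIZE_ATTRIB.
        props.setProperty("TenantId", "org1");
        props.setProperty("UpsertBatchSize", "1000");
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost", props)) {
            conn.setAutoCommit(true);
            // An UPSERT ... SELECT run here would commit in batches of 1000 rows.
        }
    }
}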

git commit: PHOENIX-1341 Exit status for PhoenixRuntime

2014-10-13 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/3.0 ed3e3f55f -> d42866918


PHOENIX-1341 Exit status for PhoenixRuntime

Return a non-zero exit status on a failure in PhoenixRuntime.

Signed-off-by: Gabriel Reid 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/d4286691
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/d4286691
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/d4286691

Branch: refs/heads/3.0
Commit: d42866918448d18e32fbdf3266dd8add7ec1b428
Parents: ed3e3f5
Author: Brian Johnson 
Authored: Sat Oct 11 14:30:37 2014 -0700
Committer: Gabriel Reid 
Committed: Mon Oct 13 09:57:43 2014 +0200

--
 .../src/main/java/org/apache/phoenix/util/PhoenixRuntime.java | 7 +--
 1 file changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/d4286691/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
index fdcc02a..064ca62 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
@@ -129,6 +129,8 @@ public class PhoenixRuntime {
 ExecutionCommand execCmd = ExecutionCommand.parseArgs(args);
 String jdbcUrl = JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + execCmd.getConnectionString();
 
+int exitStatus = 0;
+
 PhoenixConnection conn = null;
 try {
 Properties props = new Properties();
@@ -157,6 +159,7 @@ public class PhoenixRuntime {
 }
 } catch (Throwable t) {
 t.printStackTrace();
+exitStatus = 1;
 } finally {
 if (conn != null) {
 try {
@@ -165,7 +168,7 @@ public class PhoenixRuntime {
 //going to shut jvm down anyway. So might as well feast on it.
 }
 }
-System.exit(0);
+System.exit(exitStatus);
 }
 }
 
@@ -414,7 +417,7 @@ public class PhoenixRuntime {
 output.write(QueryConstants.SEPARATOR_BYTE);
 }
 type = pkColumns.get(i).getDataType();
-
+
+//for fixed width data types like CHAR and BINARY, we need to pad values to be of max length.
+Object paddedObj = type.pad(values[i - offset], pkColumns.get(i).getMaxLength());
 byte[] value = type.toBytes(paddedObj);



git commit: PHOENIX-1341 Exit status for PhoenixRuntime

2014-10-13 Thread greid
Repository: phoenix
Updated Branches:
  refs/heads/4.0 ab8b17d79 -> debf09265


PHOENIX-1341 Exit status for PhoenixRuntime

Return a non-zero exit status on a failure in PhoenixRuntime.

Signed-off-by: Gabriel Reid 


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/debf0926
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/debf0926
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/debf0926

Branch: refs/heads/4.0
Commit: debf09265a1236d5d020be3eab2e1fb86f97fa28
Parents: ab8b17d
Author: Brian Johnson 
Authored: Sat Oct 11 14:30:37 2014 -0700
Committer: Gabriel Reid 
Committed: Mon Oct 13 09:03:41 2014 +0200

--
 .../org/apache/phoenix/util/PhoenixRuntime.java | 61 ++--
 1 file changed, 32 insertions(+), 29 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/debf0926/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
index 9b0ecaf..492e940 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
@@ -65,10 +65,10 @@ import org.apache.phoenix.schema.TableNotFoundException;
 import com.google.common.collect.Lists;
 
 /**
- * 
+ *
  * Collection of non JDBC compliant utility methods
  *
- * 
+ *
  * @since 0.1
  */
 public class PhoenixRuntime {
@@ -86,19 +86,19 @@ public class PhoenixRuntime {
 public final static String JDBC_PROTOCOL = "jdbc:phoenix";
 public final static char JDBC_PROTOCOL_TERMINATOR = ';';
 public final static char JDBC_PROTOCOL_SEPARATOR = ':';
-
+
 @Deprecated
 public final static String EMBEDDED_JDBC_PROTOCOL = PhoenixRuntime.JDBC_PROTOCOL + PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR;
-
+
 /**
  * Use this connection property to control the number of rows that are
  * batched together on an UPSERT INTO table1... SELECT ... FROM table2.
  * It's only used when autoCommit is true and your source table is
- * different than your target table or your SELECT statement has a 
+ * different than your target table or your SELECT statement has a
  * GROUP BY clause.
  */
 public final static String UPSERT_BATCH_SIZE_ATTRIB = "UpsertBatchSize";
-
+
 /**
 * Use this connection property to help with fairness of resource allocation
  * for the client and server. The value of the attribute determines the
@@ -107,7 +107,7 @@ public class PhoenixRuntime {
  * configuration properties
  */
 public static final String TENANT_ID_ATTRIB = "TenantId";
-
+
 /**
 * Use this connection property prefix for annotations that you want to show up in traces and log lines emitted by Phoenix.
 * This is useful for annotating connections with information available on the client (e.g. user or session identifier) and
@@ -121,7 +121,7 @@
 * upserting data into them, and getting the uncommitted state through {@link #getUncommittedData(Connection)}
  */
 public final static String CONNECTIONLESS = "none";
-
+
 private static final String HEADER_IN_LINE = "in-line";
 private static final String SQL_FILE_EXT = ".sql";
 private static final String CSV_FILE_EXT = ".csv";
@@ -139,6 +139,8 @@ public class PhoenixRuntime {
 ExecutionCommand execCmd = ExecutionCommand.parseArgs(args);
 String jdbcUrl = JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR + execCmd.getConnectionString();
 
+int exitStatus = 0;
+
 PhoenixConnection conn = null;
 try {
 Properties props = new Properties();
@@ -167,6 +169,7 @@ public class PhoenixRuntime {
 }
 } catch (Throwable t) {
 t.printStackTrace();
+exitStatus = 1;
 } finally {
 if (conn != null) {
 try {
@@ -175,7 +178,7 @@ public class PhoenixRuntime {
 //going to shut jvm down anyway. So might as well feast on it.
 }
 }
-System.exit(0);
+System.exit(exitStatus);
 }
 }
 
@@ -183,7 +186,7 @@ public class PhoenixRuntime {
 
 private PhoenixRuntime() {
 }
-
+
 /**
 * Runs a series of semicolon-terminated SQL statements using the connection provided, returning
 * the number of SQL statements executed. Note that if the connection has specified an SCN through
@@ -202,13 +205,13 @@ public class PhoenixRuntime {
 pconn.setAutoCommit(true);
 return pconn.executeStatements(reader, binds, S

Build failed in Jenkins: Phoenix | 3.0 | Hadoop1 #251

2014-10-13 Thread Apache Jenkins Server
See 

Changes:

[ramkrishna] Phoenix-1333 Store statistics guideposts as VARBINARY (Ramkrishna S

[ramkrishna] PHOENIX-1333 - Store statistics guideposts as VARBINARY (Add 
missing file

[ramkrishna] PHOENIX-1337 Unpadded fixed length tenant ID causes erroneous 
results

[gabrielr] PHOENIX-1341 Exit status for PhoenixRuntime

--
[...truncated 1689 lines...]
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.scanUnordered(GroupedAggregateRegionObserver.java:354)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.doPostScannerOpen(GroupedAggregateRegionObserver.java:130)
at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.postScannerOpen(BaseScannerRegionObserver.java:96)
... 8 more

at org.apache.hadoop.hbase.ipc.HBaseClient.call(HBaseClient.java:1012)
at 
org.apache.hadoop.hbase.ipc.WritableRpcEngine$Invoker.invoke(WritableRpcEngine.java:87)
at com.sun.proxy.$Proxy22.openScanner(Unknown Source)
at 
org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:224)
at 
org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:126)
at 
org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:42)
at 
org.apache.hadoop.hbase.client.ServerCallable.withRetries(ServerCallable.java:164)
at 
org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:211)
at 
org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:126)
at 
org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:121)
at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:702)
at 
org.apache.phoenix.iterate.TableResultIterator.<init>(TableResultIterator.java:54)
at 
org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:583)
at 
org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:578)
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
at java.util.concurrent.FutureTask.run(FutureTask.java:166)
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:724)

Running org.apache.phoenix.end2end.AutoCommitIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.359 sec - in 
org.apache.phoenix.end2end.ServerExceptionIT
Running org.apache.phoenix.end2end.LastValueFunctionIT
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.406 sec - in 
org.apache.phoenix.end2end.AutoCommitIT
Running org.apache.phoenix.end2end.RoundFloorCeilFunctionsEnd2EndIT
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.019 sec - in 
org.apache.phoenix.end2end.LastValueFunctionIT
Running org.apache.phoenix.end2end.LpadFunctionIT
Tests run: 30, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.501 sec - in 
org.apache.phoenix.end2end.RoundFloorCeilFunctionsEnd2EndIT
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.421 sec - in 
org.apache.phoenix.end2end.LpadFunctionIT
Tests run: 64, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 96.944 sec - 
in org.apache.phoenix.end2end.HashJoinIT
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 35.209 sec - in 
org.apache.phoenix.end2end.QueryMoreIT

Results :

Tests in error: 
  NthValueFunctionIT.offsetValueAscOrder:74 » PhoenixIO 
org.apache.phoenix.excep...
  NthValueFunctionIT.simpleTest:49 » PhoenixIO 
org.apache.phoenix.exception.Phoe...
  NthValueFunctionIT.offsetValueLastMismatchByColumn:124 » PhoenixIO 
org.apache
  NthValueFunctionIT.testSortOrderInDataColWithOffset:148 » PhoenixIO 
org.apache...
  NthValueFunctionIT.offsetValueDescOrder:99 » PhoenixIO 
org.apache.phoenix.exce...
  FirstValueFunctionIT.allColumnsNull:196 » PhoenixIO 
org.apache.phoenix.excepti...
  FirstValueFunctionIT.floatDataType:171 » PhoenixIO 
org.apache.phoenix.exceptio...
  FirstValueFunctionIT.signedLongAsBigInt:51 » PhoenixIO 
org.apache.phoenix.exce...
  FirstValueFunctionIT.testSortOrderInDataCol:99 » PhoenixIO 
org.apache.phoenix
  FirstValueFunctionIT.doubleDataType:123 » PhoenixIO 
org.apache.phoenix.excepti...
  FirstValueFunctionIT.testSortOrderInSortCol:75 » PhoenixIO 
org.apache.phoenix
  FirstValueFunctionIT.varcharFixedLenghtDatatype:147 » PhoenixIO 
org.apache.pho...
  UpsertBigValuesIT.testIntegerKV:240 » PhoenixIO 
org.apache.phoenix.exception.P...
  UpsertBigValuesIT.testBigIntKV:323 » PhoenixIO 
org.apache.phoenix.exception.Ph...
  RegexpSubstrFunctionIT.testGroupByScanWithRegexpSubstr:68 » PhoenixIO 
org.apac...

Tests run: 405, Failures: 0, Errors: 15, Skipped: 0

[INFO] 
[INFO] --- maven-failsafe-plugin:2.17:in

git commit: PHOENIX-1309 Ensure Phoenix table is created for Local index and view index tables to store guideposts against them - Addendum (James Taylor via Ram)

2014-10-13 Thread ramkrishna
Repository: phoenix
Updated Branches:
  refs/heads/3.0 d42866918 -> 28d7f638f


PHOENIX-1309 Ensure Phoenix table is created for Local index and view
index tables to store guideposts against them - Addendum (James Taylor via
Ram)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/28d7f638
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/28d7f638
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/28d7f638

Branch: refs/heads/3.0
Commit: 28d7f638fca2bccfecc1d16c26efb5a3009eea6a
Parents: d428669
Author: Ramkrishna 
Authored: Mon Oct 13 14:25:51 2014 +0530
Committer: Ramkrishna 
Committed: Mon Oct 13 14:25:51 2014 +0530

--
 .../end2end/TenantSpecificViewIndexIT.java  |   2 +-
 .../apache/phoenix/compile/PostDDLCompiler.java |  14 +--
 .../phoenix/compile/StatementContext.java   |   4 +
 .../apache/phoenix/schema/MetaDataClient.java   | 105 +++
 .../java/org/apache/phoenix/query/BaseTest.java |   2 +-
 5 files changed, 74 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/28d7f638/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificViewIndexIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificViewIndexIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificViewIndexIT.java
index e7cdc01..8abda3b 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificViewIndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificViewIndexIT.java
@@ -124,7 +124,7 @@ public class TenantSpecificViewIndexIT extends BaseTenantSpecificViewIndexIT {
 }
 
 @Test
-public void testQueryingUsingTenantSpecific() throws Exception {
+public void testNonPaddedTenantId() throws Exception {
 String tenantId1 = "org1";
 String tenantId2 = "org2";
 String ddl = "CREATE TABLE T (tenantId char(15) NOT NULL, pk1 varchar 
NOT NULL, pk2 INTEGER NOT NULL, val1 VARCHAR CONSTRAINT pk primary key 
(tenantId,pk1,pk2)) MULTI_TENANT = true";

http://git-wip-us.apache.org/repos/asf/phoenix/blob/28d7f638/phoenix-core/src/main/java/org/apache/phoenix/compile/PostDDLCompiler.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/PostDDLCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/PostDDLCompiler.java
index 294942f..033995e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/PostDDLCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/PostDDLCompiler.java
@@ -68,8 +68,13 @@ public class PostDDLCompiler {
 private final StatementContext context; // bogus context
 
 public PostDDLCompiler(PhoenixConnection connection) {
+this(connection, new Scan());
+}
+
+public PostDDLCompiler(PhoenixConnection connection, Scan scan) {
 this.connection = connection;
-this.context = new StatementContext(new PhoenixStatement(connection));
+this.context = new StatementContext(new PhoenixStatement(connection), scan);
+scan.setAttribute(BaseScannerRegionObserver.UNGROUPED_AGG, QueryConstants.TRUE);
 }
 
 public MutationPlan compile(final List tableRefs, final byte[] emptyCF, final byte[] projectCF, final List deleteList,
@@ -101,19 +106,16 @@ public class PostDDLCompiler {
 try {
 connection.setAutoCommit(true);
 SQLException sqlE = null;
-if (deleteList == null && emptyCF == null) {
-return new MutationState(0, connection);
-}
 /*
  * Handles:
 * 1) deletion of all rows for a DROP TABLE and subsequently deletion of all rows for a DROP INDEX;
 * 2) deletion of all column values for a ALTER TABLE DROP COLUMN
  * 3) updating the necessary rows to have an empty KV
+ * 4) updating table stats
  */
 long totalMutationCount = 0;
 for (final TableRef tableRef : tableRefs) {
-Scan scan = new Scan();
-scan.setAttribute(BaseScannerRegionObserver.UNGROUPED_AGG, QueryConstants.TRUE);
+Scan scan = ScanUtil.newScan(context.getScan());
 SelectStatement select = SelectStatement.COUNT_ONE;
 // We need to use this tableRef
 ColumnResolver resolver = new ColumnResolver() {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/28d7f638/

Build failed in Jenkins: Phoenix | 3.0 | Hadoop1 #252

2014-10-13 Thread Apache Jenkins Server
See 

Changes:

[ramkrishna] PHOENIX-1309 Ensure Phoenix table is created for Local index and 
view

--
Started by an SCM change
Building remotely on ubuntu-4 (docker Ubuntu ubuntu4 ubuntu) in workspace 

 > git rev-parse --is-inside-work-tree
Fetching changes from the remote Git repository
 > git config remote.origin.url 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git
Fetching upstream changes from 
https://git-wip-us.apache.org/repos/asf/phoenix.git
 > git --version
 > git fetch --tags --progress 
 > https://git-wip-us.apache.org/repos/asf/phoenix.git 
 > +refs/heads/*:refs/remotes/origin/*
 > git rev-parse origin/3.0^{commit}
Checking out Revision 28d7f638fca2bccfecc1d16c26efb5a3009eea6a (origin/3.0)
 > git config core.sparsecheckout
 > git checkout -f 28d7f638fca2bccfecc1d16c26efb5a3009eea6a
 > git rev-list d42866918448d18e32fbdf3266dd8add7ec1b428
No emails were triggered.
[Phoenix-3.0-hadoop1] $ /home/jenkins/tools/maven/apache-maven-3.0.4/bin/mvn 
clean install -DnumForkedIT=4
[INFO] Scanning for projects...
[INFO] 
[INFO] Reactor Build Order:
[INFO] 
[INFO] Apache Phoenix
[INFO] Phoenix Hadoop Compatibility
[INFO] Phoenix Core
[INFO] Phoenix - Flume
[INFO] Phoenix - Pig
[INFO] Phoenix Hadoop1 Compatibility
[INFO] Phoenix Assembly
[INFO] 
[INFO] 
[INFO] Building Apache Phoenix 3.2.0-SNAPSHOT
[INFO] 
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ phoenix ---
[INFO] Deleting 
[INFO] 
[INFO] --- maven-remote-resources-plugin:1.5:process (default) @ phoenix ---
[INFO] 
[INFO] --- maven-source-plugin:2.2.1:jar-no-fork (attach-sources) @ phoenix ---
[INFO] 
[INFO] --- maven-jar-plugin:2.4:test-jar (default) @ phoenix ---
[WARNING] JAR will be empty - no content was marked for inclusion!
[INFO] Building jar: 

[INFO] 
[INFO] --- maven-site-plugin:3.2:attach-descriptor (attach-descriptor) @ 
phoenix ---
[INFO] 
[INFO] --- maven-install-plugin:2.5.1:install (default-install) @ phoenix ---
[INFO] Installing 
 to 
/home/jenkins/.m2/repository/org/apache/phoenix/phoenix/3.2.0-SNAPSHOT/phoenix-3.2.0-SNAPSHOT.pom
[INFO] Installing 

 to 
/home/jenkins/.m2/repository/org/apache/phoenix/phoenix/3.2.0-SNAPSHOT/phoenix-3.2.0-SNAPSHOT-tests.jar
[INFO] 
[INFO] 
[INFO] Building Phoenix Hadoop Compatibility 3.2.0-SNAPSHOT
[INFO] 
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ phoenix-hadoop-compat 
---
[INFO] Deleting 

[INFO] 
[INFO] --- maven-remote-resources-plugin:1.5:process (default) @ 
phoenix-hadoop-compat ---
[INFO] 
[INFO] --- maven-resources-plugin:2.6:resources (default-resources) @ 
phoenix-hadoop-compat ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory 

[INFO] Copying 3 resources
[INFO] 
[INFO] --- maven-compiler-plugin:3.0:compile (default-compile) @ 
phoenix-hadoop-compat ---
[INFO] No sources to compile
[INFO] 
[INFO] --- maven-resources-plugin:2.6:testResources (default-testResources) @ 
phoenix-hadoop-compat ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory 

[INFO] Copying 3 resources
[INFO] 
[INFO] --- maven-compiler-plugin:3.0:testCompile (default-testCompile) @ 
phoenix-hadoop-compat ---
[INFO] No sources to compile
[INFO] 
[INFO] --- maven-surefire-plugin:2.17:test (default-test) @ 
phoenix-hadoop-compat ---
[INFO] 
[INFO] --- maven-source-plugin:2.2.1:jar-no-fork (attach-sources) @ 
phoenix-hadoop-compat ---
[INFO] Building jar: 

[INFO] 
[INFO] --- maven-jar-plugin:2.4:test-jar (default) @ phoenix-hadoop-compat ---
[INFO] Building jar: 


git commit: PHOENIX-1309 - Adding missing file DelegateTable.java

2014-10-13 Thread ramkrishna
Repository: phoenix
Updated Branches:
  refs/heads/3.0 28d7f638f -> ad4a40806


PHOENIX-1309 - Adding missing file DelegateTable.java
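
DelegateTable is a plain delegate: it implements PTable and forwards every call to a wrapped instance, so subclasses can override only the methods they care about. A generic sketch of that pattern with illustrative names (not the actual Phoenix types):

// Generic illustration of the delegate pattern DelegateTable follows.
interface Table {
    String getName();
    long getTimeStamp();
}

class DelegatingTable implements Table {
    private final Table delegate;

    DelegatingTable(Table delegate) {
        this.delegate = delegate;
    }

    @Override
    public String getName() {
        return delegate.getName(); // forward unchanged
    }

    @Override
    public long getTimeStamp() {
        return delegate.getTimeStamp(); // forward unchanged
    }
}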


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ad4a4080
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ad4a4080
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ad4a4080

Branch: refs/heads/3.0
Commit: ad4a408064097e53ab492b24a213c8652aa9bd3c
Parents: 28d7f63
Author: Ramkrishna 
Authored: Mon Oct 13 15:04:13 2014 +0530
Committer: Ramkrishna 
Committed: Mon Oct 13 15:04:13 2014 +0530

--
 .../apache/phoenix/schema/DelegateTable.java| 236 +++
 1 file changed, 236 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ad4a4080/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateTable.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateTable.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateTable.java
new file mode 100644
index 0000000..695282a
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateTable.java
@@ -0,0 +1,236 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.schema;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
+import org.apache.phoenix.index.IndexMaintainer;
+import org.apache.phoenix.schema.stats.PTableStats;
+
+public class DelegateTable implements PTable {
+@Override
+public long getTimeStamp() {
+return delegate.getTimeStamp();
+}
+
+@Override
+public long getSequenceNumber() {
+return delegate.getSequenceNumber();
+}
+
+@Override
+public PName getName() {
+return delegate.getName();
+}
+
+@Override
+public PName getSchemaName() {
+return delegate.getSchemaName();
+}
+
+@Override
+public PName getTableName() {
+return delegate.getTableName();
+}
+
+@Override
+public PName getTenantId() {
+return delegate.getTenantId();
+}
+
+@Override
+public PTableType getType() {
+return delegate.getType();
+}
+
+@Override
+public PName getPKName() {
+return delegate.getPKName();
+}
+
+@Override
+public List<PColumn> getPKColumns() {
+return delegate.getPKColumns();
+}
+
+@Override
+public List<PColumn> getColumns() {
+return delegate.getColumns();
+}
+
+@Override
+public List<PColumnFamily> getColumnFamilies() {
+return delegate.getColumnFamilies();
+}
+
+@Override
+public PColumnFamily getColumnFamily(byte[] family) throws ColumnFamilyNotFoundException {
+return delegate.getColumnFamily(family);
+}
+
+@Override
+public PColumnFamily getColumnFamily(String family) throws ColumnFamilyNotFoundException {
+return delegate.getColumnFamily(family);
+}
+
+@Override
+public PColumn getColumn(String name) throws ColumnNotFoundException, AmbiguousColumnException {
+return delegate.getColumn(name);
+}
+
+@Override
+public PColumn getPKColumn(String name) throws ColumnNotFoundException {
+return delegate.getPKColumn(name);
+}
+
+@Override
+public PRow newRow(KeyValueBuilder builder, long ts, ImmutableBytesWritable key, byte[]... values) {
+return delegate.newRow(builder, ts, key, values);
+}
+
+@Override
+public PRow newRow(KeyValueBuilder builder, ImmutableBytesWritable key, byte[]... values) {
+return delegate.newRow(builder, key, values);
+}
+
+@Override
+public int newKey(ImmutableBytesWritable key, byte[][] values) {
+return delegate.newKey(key, values);
+}
+
+@Override
+public RowKeySchema getRowKeySchema() {
+return delegate.getRowKeySchema();
+}
+

Jenkins build is back to normal : Phoenix | 3.0 | Hadoop1 #253

2014-10-13 Thread Apache Jenkins Server
See 



Build failed in Jenkins: Phoenix | Master #420

2014-10-13 Thread Apache Jenkins Server
See 

Changes:

[gabrielr] PHOENIX-1341 Exit status for PhoenixRuntime

--
[...truncated 3487 lines...]
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.scanUnordered(GroupedAggregateRegionObserver.java:400)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.doPostScannerOpen(GroupedAggregateRegionObserver.java:161)
at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.postScannerOpen(BaseScannerRegionObserver.java:140)
... 8 more

at 
org.apache.phoenix.util.ServerUtil.parseServerException(ServerUtil.java:107)
at 
org.apache.phoenix.iterate.TableResultIterator.<init>(TableResultIterator.java:57)
at 
org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:606)
at 
org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:601)
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
at java.util.concurrent.FutureTask.run(FutureTask.java:166)
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:724)
Caused by: org.apache.hadoop.hbase.DoNotRetryIOException: 
org.apache.hadoop.hbase.DoNotRetryIOException: 
Join.OrderTable,,1413188176857.72d776d083ec240d6d52472f24afc48d.: Requested 
memory of 332116 bytes is larger than global pool of 4 bytes.
at 
org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:83)
at 
org.apache.phoenix.util.ServerUtil.throwIOException(ServerUtil.java:51)
at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.postScannerOpen(BaseScannerRegionObserver.java:158)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.postScannerOpen(RegionCoprocessorHost.java:1845)
at 
org.apache.hadoop.hbase.regionserver.HRegionServer.scan(HRegionServer.java:3092)
at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:29497)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2027)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:98)
at 
org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:114)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:94)
at java.lang.Thread.run(Thread.java:724)
Caused by: org.apache.phoenix.memory.InsufficientMemoryException: Requested 
memory of 332116 bytes is larger than global pool of 4 bytes.
at 
org.apache.phoenix.memory.GlobalMemoryManager.allocateBytes(GlobalMemoryManager.java:72)
at 
org.apache.phoenix.memory.GlobalMemoryManager.allocate(GlobalMemoryManager.java:100)
at 
org.apache.phoenix.memory.GlobalMemoryManager.allocate(GlobalMemoryManager.java:106)
at 
org.apache.phoenix.cache.aggcache.SpillableGroupByCache.<init>(SpillableGroupByCache.java:150)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver$GroupByCacheFactory.newCache(GroupedAggregateRegionObserver.java:365)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.scanUnordered(GroupedAggregateRegionObserver.java:400)
at 
org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver.doPostScannerOpen(GroupedAggregateRegionObserver.java:161)
at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.postScannerOpen(BaseScannerRegionObserver.java:140)
... 8 more

at sun.reflect.GeneratedConstructorAccessor75.newInstance(Unknown 
Source)
at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
at 
org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at 
org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
at 
org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:285)
at 
org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:316)
at 
org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:164)
at 
org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:59)
at 
org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:114)
at 
org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:90)
at 
org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:282)
at 
org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:187)
at 
org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientSca

git commit: PHOENIX-1314 Assertion tripped for skip scan with two unit tests (Rajesh babu)

2014-10-13 Thread ramkrishna
Repository: phoenix
Updated Branches:
  refs/heads/3.0 ad4a40806 -> 8c37ab471


PHOENIX-1314 Assertion tripped for skip scan with two unit tests (Rajesh
babu)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8c37ab47
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8c37ab47
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8c37ab47

Branch: refs/heads/3.0
Commit: 8c37ab471402fcbc513861612022a8288bc93d04
Parents: ad4a408
Author: Ramkrishna 
Authored: Mon Oct 13 15:49:53 2014 +0530
Committer: Ramkrishna 
Committed: Mon Oct 13 15:49:53 2014 +0530

--
 .../src/main/java/org/apache/phoenix/index/PhoenixIndexBuilder.java | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c37ab47/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexBuilder.java
--
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexBuilder.java b/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexBuilder.java
index 8822e49..746ec5b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexBuilder.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexBuilder.java
@@ -59,6 +59,7 @@ public class PhoenixIndexBuilder extends CoveredColumnsIndexBuilder {
 }
 Scan scan = IndexManagementUtil.newLocalStateScan(maintainers);
 ScanRanges scanRanges = ScanRanges.create(SchemaUtil.VAR_BINARY_SCHEMA, Collections.singletonList(keys), ScanUtil.SINGLE_COLUMN_SLOT_SPAN);
+scanRanges.initializeScan(scan);
 scan.setFilter(scanRanges.getSkipScanFilter());
 HRegion region = this.env.getRegion();
 RegionScanner scanner = region.getScanner(scan);



Apache-Phoenix | 3.0 | Hadoop1 | Build Successful

2014-10-13 Thread Apache Jenkins Server
3.0 branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastCompletedBuild/testReport/

Changes
[ramkrishna] PHOENIX-1314 Assertion tripped for skip scan with two unit tests (Rajesh



git commit: PHOENIX-1332 Support correlated subqueries in comparison with ANY/SOME/ALL

2014-10-13 Thread maryannxue
Repository: phoenix
Updated Branches:
  refs/heads/master 656acefd1 -> 49ec34be2


PHOENIX-1332 Support correlated subqueries in comparison with ANY/SOME/ALL
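
For readers unfamiliar with the syntax, a correlated subquery compared with ANY/SOME/ALL looks like the sketch below; the JDBC URL and the orders schema are placeholders, not taken from the patch:

// Illustrative use of a correlated subquery with ALL (placeholder schema).
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class AnyAllSubquerySketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
             Statement stmt = conn.createStatement()) {
            // Return orders whose quantity is at least as large as every other
            // order for the same item; the subquery is correlated on item_id.
            String sql = "SELECT order_id FROM orders o "
                + "WHERE quantity >= ALL (SELECT quantity FROM orders WHERE item_id = o.item_id)";
            try (ResultSet rs = stmt.executeQuery(sql)) {
                while (rs.next()) {
                    System.out.println(rs.getString("order_id"));
                }
            }
        }
    }
}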


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/49ec34be
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/49ec34be
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/49ec34be

Branch: refs/heads/master
Commit: 49ec34be258ce12ca150c5c37a35e2c1cad0105c
Parents: 656acef
Author: maryannxue 
Authored: Mon Oct 13 11:05:00 2014 -0400
Committer: maryannxue 
Committed: Mon Oct 13 11:05:00 2014 -0400

--
 .../org/apache/phoenix/end2end/SubqueryIT.java  |  79 
 .../org/apache/phoenix/cache/HashCache.java |   4 +-
 .../apache/phoenix/compile/JoinCompiler.java|  15 +-
 .../apache/phoenix/compile/QueryCompiler.java   |   4 +-
 .../phoenix/compile/SubqueryRewriter.java   | 181 +++
 .../coprocessor/HashJoinRegionScanner.java  |   1 +
 .../apache/phoenix/execute/HashJoinPlan.java|   5 +-
 .../expression/ArrayConstructorExpression.java  |  16 +-
 .../phoenix/expression/ExpressionType.java  |   2 +
 .../DistinctValueClientAggregator.java  |  63 +++
 .../DistinctValueWithCountServerAggregator.java |   2 +-
 .../DistinctValueAggregateFunction.java |  66 +++
 .../apache/phoenix/join/HashCacheClient.java|   7 +-
 .../apache/phoenix/join/HashCacheFactory.java   |  15 +-
 .../org/apache/phoenix/parse/JoinTableNode.java |   8 +-
 .../apache/phoenix/parse/ParseNodeFactory.java  |   6 +-
 .../apache/phoenix/parse/ParseNodeRewriter.java |   2 +-
 17 files changed, 417 insertions(+), 59 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/49ec34be/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
index 58d92f3..e4b4c8b 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
@@ -899,6 +899,85 @@ public class SubqueryIT extends BaseHBaseManagedTimeIT {
 rs = conn.createStatement().executeQuery("EXPLAIN " + query);
 String plan = QueryUtil.getExplainPlan(rs);
 assertTrue("\"" + plan + "\" does not match \"" + plans[4] + "\"", 
Pattern.matches(plans[4], plan));
+
+query = "SELECT \"order_id\" FROM " + JOIN_ORDER_TABLE_FULL_NAME + 
" o WHERE quantity = (SELECT quantity FROM " + JOIN_ORDER_TABLE_FULL_NAME + " 
WHERE o.\"item_id\" = \"item_id\" AND \"order_id\" != '004')";
+statement = conn.prepareStatement(query);
+rs = statement.executeQuery();
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "001");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "002");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "003");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "005");
+
+assertFalse(rs.next());
+
+query = "SELECT \"order_id\" FROM " + JOIN_ORDER_TABLE_FULL_NAME + 
" o WHERE quantity = (SELECT quantity FROM " + JOIN_ORDER_TABLE_FULL_NAME + " 
WHERE o.\"item_id\" = \"item_id\" AND \"order_id\" != '003')";
+statement = conn.prepareStatement(query);
+rs = statement.executeQuery();
+try {
+while(rs.next());
+fail("Should have got exception.");
+} catch (SQLException e) {
+}
+
+query = "SELECT \"order_id\" FROM " + JOIN_ORDER_TABLE_FULL_NAME + 
" o WHERE quantity = (SELECT max(quantity) FROM " + JOIN_ORDER_TABLE_FULL_NAME 
+ " WHERE o.\"item_id\" = \"item_id\" AND \"order_id\" != '004' 
GROUP BY \"order_id\")";
+statement = conn.prepareStatement(query);
+rs = statement.executeQuery();
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "001");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "002");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "003");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "005");
+
+assertFalse(rs.next());
+
+query = "SELECT \"order_id\" FROM " + JOIN_ORDER_TABLE_FULL_NAME + 
" o WHERE quantity = (SELECT max(quantity) FROM " + JOIN_ORDER_TABLE_FULL_NAME 
+ " WHERE o.\"item_id\" = \

git commit: PHOENIX-1332 Support correlated subqueries in comparison with ANY/SOME/ALL

2014-10-13 Thread maryannxue
Repository: phoenix
Updated Branches:
  refs/heads/4.0 debf09265 -> ae51cae69


PHOENIX-1332 Support correlated subqueries in comparison with ANY/SOME/ALL


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ae51cae6
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ae51cae6
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ae51cae6

Branch: refs/heads/4.0
Commit: ae51cae6966062b5d1c19cc87419100ccee23629
Parents: debf092
Author: maryannxue 
Authored: Mon Oct 13 11:14:53 2014 -0400
Committer: maryannxue 
Committed: Mon Oct 13 11:14:53 2014 -0400

--
 .../org/apache/phoenix/end2end/SubqueryIT.java  |  79 
 .../org/apache/phoenix/cache/HashCache.java |   4 +-
 .../apache/phoenix/compile/JoinCompiler.java|  15 +-
 .../apache/phoenix/compile/QueryCompiler.java   |   4 +-
 .../phoenix/compile/SubqueryRewriter.java   | 181 +++
 .../coprocessor/HashJoinRegionScanner.java  |   1 +
 .../apache/phoenix/execute/HashJoinPlan.java|   5 +-
 .../expression/ArrayConstructorExpression.java  |  16 +-
 .../phoenix/expression/ExpressionType.java  |   2 +
 .../DistinctValueClientAggregator.java  |  63 +++
 .../DistinctValueWithCountServerAggregator.java |   2 +-
 .../DistinctValueAggregateFunction.java |  66 +++
 .../apache/phoenix/join/HashCacheClient.java|   7 +-
 .../apache/phoenix/join/HashCacheFactory.java   |  14 +-
 .../org/apache/phoenix/parse/JoinTableNode.java |   8 +-
 .../apache/phoenix/parse/ParseNodeFactory.java  |   6 +-
 .../apache/phoenix/parse/ParseNodeRewriter.java |   2 +-
 17 files changed, 417 insertions(+), 58 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ae51cae6/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
index 58d92f3..e4b4c8b 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
@@ -899,6 +899,85 @@ public class SubqueryIT extends BaseHBaseManagedTimeIT {
 rs = conn.createStatement().executeQuery("EXPLAIN " + query);
 String plan = QueryUtil.getExplainPlan(rs);
 assertTrue("\"" + plan + "\" does not match \"" + plans[4] + "\"", 
Pattern.matches(plans[4], plan));
+
+query = "SELECT \"order_id\" FROM " + JOIN_ORDER_TABLE_FULL_NAME + 
" o WHERE quantity = (SELECT quantity FROM " + JOIN_ORDER_TABLE_FULL_NAME + " 
WHERE o.\"item_id\" = \"item_id\" AND \"order_id\" != '004')";
+statement = conn.prepareStatement(query);
+rs = statement.executeQuery();
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "001");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "002");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "003");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "005");
+
+assertFalse(rs.next());
+
+query = "SELECT \"order_id\" FROM " + JOIN_ORDER_TABLE_FULL_NAME + 
" o WHERE quantity = (SELECT quantity FROM " + JOIN_ORDER_TABLE_FULL_NAME + " 
WHERE o.\"item_id\" = \"item_id\" AND \"order_id\" != '003')";
+statement = conn.prepareStatement(query);
+rs = statement.executeQuery();
+try {
+while(rs.next());
+fail("Should have got exception.");
+} catch (SQLException e) {
+}
+
+query = "SELECT \"order_id\" FROM " + JOIN_ORDER_TABLE_FULL_NAME + 
" o WHERE quantity = (SELECT max(quantity) FROM " + JOIN_ORDER_TABLE_FULL_NAME 
+ " WHERE o.\"item_id\" = \"item_id\" AND \"order_id\" != '004' 
GROUP BY \"order_id\")";
+statement = conn.prepareStatement(query);
+rs = statement.executeQuery();
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "001");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "002");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "003");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "005");
+
+assertFalse(rs.next());
+
+query = "SELECT \"order_id\" FROM " + JOIN_ORDER_TABLE_FULL_NAME + 
" o WHERE quantity = (SELECT max(quantity) FROM " + JOIN_ORDER_TABLE_FULL_NAME 
+ " WHERE o.\"item_id\" = \"item_

git commit: PHOENIX-1332 Support correlated subqueries in comparison with ANY/SOME/ALL

2014-10-13 Thread maryannxue
Repository: phoenix
Updated Branches:
  refs/heads/3.0 8c37ab471 -> cc436c9b4


PHOENIX-1332 Support correlated subqueries in comparison with ANY/SOME/ALL


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/cc436c9b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/cc436c9b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/cc436c9b

Branch: refs/heads/3.0
Commit: cc436c9b43c48d636d0659cdb29618e957275111
Parents: 8c37ab4
Author: maryannxue 
Authored: Mon Oct 13 11:25:43 2014 -0400
Committer: maryannxue 
Committed: Mon Oct 13 11:25:43 2014 -0400

--
 .../org/apache/phoenix/end2end/SubqueryIT.java  |  79 
 .../org/apache/phoenix/cache/HashCache.java |   4 +-
 .../apache/phoenix/compile/JoinCompiler.java|  15 +-
 .../apache/phoenix/compile/QueryCompiler.java   |   4 +-
 .../phoenix/compile/SubqueryRewriter.java   | 181 +++
 .../coprocessor/HashJoinRegionScanner.java  |   1 +
 .../apache/phoenix/execute/HashJoinPlan.java|   5 +-
 .../expression/ArrayConstructorExpression.java  |  16 +-
 .../phoenix/expression/ExpressionType.java  |   2 +
 .../DistinctValueClientAggregator.java  |  63 +++
 .../DistinctValueWithCountServerAggregator.java |   2 +-
 .../DistinctValueAggregateFunction.java |  66 +++
 .../apache/phoenix/join/HashCacheClient.java|   7 +-
 .../apache/phoenix/join/HashCacheFactory.java   |  14 +-
 .../org/apache/phoenix/parse/JoinTableNode.java |   8 +-
 .../apache/phoenix/parse/ParseNodeFactory.java  |   6 +-
 .../apache/phoenix/parse/ParseNodeRewriter.java |   2 +-
 17 files changed, 417 insertions(+), 58 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/cc436c9b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
index 61ab788..f0b8cc1 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
@@ -822,6 +822,85 @@ public class SubqueryIT extends BaseHBaseManagedTimeIT {
 rs = conn.createStatement().executeQuery("EXPLAIN " + query);
 String plan = QueryUtil.getExplainPlan(rs);
 assertTrue("\"" + plan + "\" does not match \"" + plans[4] + "\"", 
Pattern.matches(plans[4], plan));
+
+query = "SELECT \"order_id\" FROM " + JOIN_ORDER_TABLE_FULL_NAME + 
" o WHERE quantity = (SELECT quantity FROM " + JOIN_ORDER_TABLE_FULL_NAME + " 
WHERE o.\"item_id\" = \"item_id\" AND \"order_id\" != '004')";
+statement = conn.prepareStatement(query);
+rs = statement.executeQuery();
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "001");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "002");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "003");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "005");
+
+assertFalse(rs.next());
+
+query = "SELECT \"order_id\" FROM " + JOIN_ORDER_TABLE_FULL_NAME + 
" o WHERE quantity = (SELECT quantity FROM " + JOIN_ORDER_TABLE_FULL_NAME + " 
WHERE o.\"item_id\" = \"item_id\" AND \"order_id\" != '003')";
+statement = conn.prepareStatement(query);
+rs = statement.executeQuery();
+try {
+while(rs.next());
+fail("Should have got exception.");
+} catch (SQLException e) {
+}
+
+query = "SELECT \"order_id\" FROM " + JOIN_ORDER_TABLE_FULL_NAME + 
" o WHERE quantity = (SELECT max(quantity) FROM " + JOIN_ORDER_TABLE_FULL_NAME 
+ " WHERE o.\"item_id\" = \"item_id\" AND \"order_id\" != '004' 
GROUP BY \"order_id\")";
+statement = conn.prepareStatement(query);
+rs = statement.executeQuery();
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "001");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "002");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "003");
+assertTrue (rs.next());
+assertEquals(rs.getString(1), "005");
+
+assertFalse(rs.next());
+
+query = "SELECT \"order_id\" FROM " + JOIN_ORDER_TABLE_FULL_NAME + 
" o WHERE quantity = (SELECT max(quantity) FROM " + JOIN_ORDER_TABLE_FULL_NAME 
+ " WHERE o.\"item_id\" = \"item_

Apache-Phoenix | 3.0 | Hadoop1 | Build Successful

2014-10-13 Thread Apache Jenkins Server
3.0 branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastCompletedBuild/testReport/

Changes
[maryannxue] PHOENIX-1332 Support correlated subqueries in comparison with ANY/SOME/ALL



git commit: PHOENIX-619 Support DELETE over table with immutable index when possible

2014-10-13 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master 49ec34be2 -> 8f2a76d60


PHOENIX-619 Support DELETE over table with immutable index when possible
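
A quick illustration of the DML this enables, assuming a table declared with IMMUTABLE_ROWS=true that also carries an index; the table, index, and URL below are placeholders:

// Illustrative JDBC flow for a DELETE over an immutable-rows table with an index.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class DeleteWithImmutableIndexSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
             Statement stmt = conn.createStatement()) {
            conn.setAutoCommit(true);
            stmt.execute("CREATE TABLE event (id INTEGER PRIMARY KEY, host VARCHAR, payload VARCHAR) IMMUTABLE_ROWS=true");
            stmt.execute("CREATE INDEX event_host_idx ON event (host)");
            // Before this change a DELETE against such a table could be rejected;
            // with the patch the data table and its immutable index are both cleaned up when possible.
            stmt.executeUpdate("DELETE FROM event WHERE host = 'h1'");
        }
    }
}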


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8f2a76d6
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8f2a76d6
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8f2a76d6

Branch: refs/heads/master
Commit: 8f2a76d60ba1452466133da8e1dc3de8d1140ed2
Parents: 49ec34b
Author: James Taylor 
Authored: Mon Oct 13 20:23:43 2014 -0700
Committer: James Taylor 
Committed: Mon Oct 13 20:23:43 2014 -0700

--
 .../end2end/BaseTenantSpecificTablesIT.java |   4 +-
 .../end2end/TenantSpecificTablesDMLIT.java  |  43 ++
 .../phoenix/end2end/index/ImmutableIndexIT.java |   2 +-
 .../phoenix/end2end/index/ViewIndexIT.java  |   5 +-
 .../apache/phoenix/compile/DeleteCompiler.java  | 550 ---
 .../MutatingParallelIteratorFactory.java|   5 +-
 .../phoenix/compile/PostIndexDDLCompiler.java   |  37 +-
 .../apache/phoenix/compile/UpsertCompiler.java  |   4 +-
 .../phoenix/exception/SQLExceptionCode.java |   2 +-
 .../apache/phoenix/execute/MutationState.java   |  45 +-
 .../apache/phoenix/jdbc/PhoenixResultSet.java   |   4 +
 .../apache/phoenix/optimize/QueryOptimizer.java |  53 +-
 .../query/ConnectionQueryServicesImpl.java  |   2 +
 .../apache/phoenix/schema/MetaDataClient.java   |  48 +-
 .../java/org/apache/phoenix/util/IndexUtil.java |  14 +-
 .../phoenix/compile/QueryCompilerTest.java  |  14 +-
 .../TenantSpecificViewIndexCompileTest.java |   2 +-
 17 files changed, 541 insertions(+), 293 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8f2a76d6/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseTenantSpecificTablesIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseTenantSpecificTablesIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseTenantSpecificTablesIT.java
index 362fa08..6d6bffc 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseTenantSpecificTablesIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseTenantSpecificTablesIT.java
@@ -44,7 +44,7 @@ public abstract class BaseTenantSpecificTablesIT extends BaseOwnClusterClientMan
 "tenant_id VARCHAR(5) NOT NULL,\n" +
 "tenant_type_id VARCHAR(3) NOT NULL, \n" +
 "id INTEGER NOT NULL\n" +
-"CONSTRAINT pk PRIMARY KEY (tenant_id, tenant_type_id, id)) MULTI_TENANT=true";
+"CONSTRAINT pk PRIMARY KEY (tenant_id, tenant_type_id, id)) MULTI_TENANT=true, IMMUTABLE_ROWS=true";
 
 protected static final String TENANT_TABLE_NAME = "TENANT_TABLE";
 protected static final String TENANT_TABLE_DDL = "CREATE VIEW " + TENANT_TABLE_NAME + " ( \n" +
@@ -56,7 +56,7 @@ public abstract class BaseTenantSpecificTablesIT extends BaseOwnClusterClientMan
 "user VARCHAR ,\n" +
 "tenant_id VARCHAR(5) NOT NULL,\n" +
 "id INTEGER NOT NULL,\n" +
-"CONSTRAINT pk PRIMARY KEY (tenant_id, id)) MULTI_TENANT=true";
+"CONSTRAINT pk PRIMARY KEY (tenant_id, id)) MULTI_TENANT=true, IMMUTABLE_ROWS=true";
 
 protected static final String TENANT_TABLE_NAME_NO_TENANT_TYPE_ID = "TENANT_TABLE_NO_TENANT_TYPE_ID";
 protected static final String TENANT_TABLE_DDL_NO_TENANT_TYPE_ID = "CREATE VIEW " + TENANT_TABLE_NAME_NO_TENANT_TYPE_ID + " ( \n" +

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8f2a76d6/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDMLIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDMLIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDMLIT.java
index f0ed1d0..cdc3c07 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDMLIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDMLIT.java
@@ -267,6 +267,49 @@ public class TenantSpecificTablesDMLIT extends BaseTenantSpecificTablesIT {
 }
 
 @Test
+public void testDeleteWhenImmutableIndex() throws Exception {
+Connection conn = nextConnection(getUrl());
+try {
+conn.setAutoCommit(true);
+conn.createStatement().executeUpdate("delete from " + 
PARENT_TABLE_NAME);
+conn.close();
+
+conn = nextConnection(getUrl());
+ 

git commit: PHOENIX-619 Support DELETE over table with immutable index when possible

2014-10-13 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.0 ae51cae69 -> 6c47f8a2b


PHOENIX-619 Support DELETE over table with immutable index when possible


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6c47f8a2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6c47f8a2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6c47f8a2

Branch: refs/heads/4.0
Commit: 6c47f8a2ba19f00db9369df1f7efdfdf0fbfd5f2
Parents: ae51cae
Author: James Taylor 
Authored: Mon Oct 13 20:23:43 2014 -0700
Committer: James Taylor 
Committed: Mon Oct 13 22:58:24 2014 -0700

--
 .../end2end/BaseTenantSpecificTablesIT.java |   4 +-
 .../end2end/TenantSpecificTablesDMLIT.java  |  43 ++
 .../phoenix/end2end/index/ImmutableIndexIT.java |   2 +-
 .../phoenix/end2end/index/ViewIndexIT.java  |   5 +-
 .../apache/phoenix/compile/DeleteCompiler.java  | 550 ---
 .../MutatingParallelIteratorFactory.java|   5 +-
 .../phoenix/compile/PostIndexDDLCompiler.java   |  37 +-
 .../apache/phoenix/compile/UpsertCompiler.java  |   4 +-
 .../phoenix/exception/SQLExceptionCode.java |   2 +-
 .../apache/phoenix/execute/MutationState.java   |  45 +-
 .../apache/phoenix/jdbc/PhoenixResultSet.java   |   4 +
 .../apache/phoenix/optimize/QueryOptimizer.java |  53 +-
 .../query/ConnectionQueryServicesImpl.java  |   2 +
 .../apache/phoenix/schema/MetaDataClient.java   |  48 +-
 .../java/org/apache/phoenix/util/IndexUtil.java |  14 +-
 .../phoenix/compile/QueryCompilerTest.java  |  14 +-
 .../TenantSpecificViewIndexCompileTest.java |   2 +-
 17 files changed, 541 insertions(+), 293 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6c47f8a2/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseTenantSpecificTablesIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseTenantSpecificTablesIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseTenantSpecificTablesIT.java
index 362fa08..6d6bffc 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseTenantSpecificTablesIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseTenantSpecificTablesIT.java
@@ -44,7 +44,7 @@ public abstract class BaseTenantSpecificTablesIT extends BaseOwnClusterClientMan
 "tenant_id VARCHAR(5) NOT NULL,\n" +
 "tenant_type_id VARCHAR(3) NOT NULL, \n" +
 "id INTEGER NOT NULL\n" +
-"CONSTRAINT pk PRIMARY KEY (tenant_id, tenant_type_id, id)) MULTI_TENANT=true";
+"CONSTRAINT pk PRIMARY KEY (tenant_id, tenant_type_id, id)) MULTI_TENANT=true, IMMUTABLE_ROWS=true";
 
 protected static final String TENANT_TABLE_NAME = "TENANT_TABLE";
 protected static final String TENANT_TABLE_DDL = "CREATE VIEW " + TENANT_TABLE_NAME + " ( \n" +
@@ -56,7 +56,7 @@ public abstract class BaseTenantSpecificTablesIT extends BaseOwnClusterClientMan
 "user VARCHAR ,\n" +
 "tenant_id VARCHAR(5) NOT NULL,\n" +
 "id INTEGER NOT NULL,\n" +
-"CONSTRAINT pk PRIMARY KEY (tenant_id, id)) MULTI_TENANT=true";
+"CONSTRAINT pk PRIMARY KEY (tenant_id, id)) MULTI_TENANT=true, IMMUTABLE_ROWS=true";
 
 protected static final String TENANT_TABLE_NAME_NO_TENANT_TYPE_ID = "TENANT_TABLE_NO_TENANT_TYPE_ID";
 protected static final String TENANT_TABLE_DDL_NO_TENANT_TYPE_ID = "CREATE VIEW " + TENANT_TABLE_NAME_NO_TENANT_TYPE_ID + " ( \n" +

http://git-wip-us.apache.org/repos/asf/phoenix/blob/6c47f8a2/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDMLIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDMLIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDMLIT.java
index f0ed1d0..cdc3c07 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDMLIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/TenantSpecificTablesDMLIT.java
@@ -267,6 +267,49 @@ public class TenantSpecificTablesDMLIT extends BaseTenantSpecificTablesIT {
 }
 
 @Test
+public void testDeleteWhenImmutableIndex() throws Exception {
+Connection conn = nextConnection(getUrl());
+try {
+conn.setAutoCommit(true);
+conn.createStatement().executeUpdate("delete from " + 
PARENT_TABLE_NAME);
+conn.close();
+
+conn = nextConnection(getUrl());
+   

Jenkins build is back to stable : Phoenix | Master #422

2014-10-13 Thread Apache Jenkins Server
See