Jenkins build is back to normal : Phoenix | 4.0 | Hadoop2 #76

2014-08-28 Thread Apache Jenkins Server
See 



Apache-Phoenix | 3.0 | Hadoop1 | Build Successful

2014-08-28 Thread Apache Jenkins Server
3.0 branch build status Successful
Source repository https://git-wip-us.apache.org/repos/asf/phoenix.git

Last Successful Compiled Artifacts https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastSuccessfulBuild/artifact/

Last Complete Test Report https://builds.apache.org/job/Phoenix-3.0-hadoop1/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-1208 Check for existence of views doesn't take into account the fact that SYSTEM.CATALOG could be split across regions



Jenkins build became unstable: Phoenix | Master | Hadoop1 #350

2014-08-28 Thread Apache Jenkins Server
See 



Apache-Phoenix | 4.0 | Hadoop1 | Build Successful

2014-08-28 Thread Apache Jenkins Server
4.0 branch build status Successful

Source repository https://git-wip-us.apache.org/repos/asf/incubator-phoenix.git

Compiled Artifacts https://builds.apache.org/job/Phoenix-4.0-hadoop1/lastSuccessfulBuild/artifact/

Test Report https://builds.apache.org/job/Phoenix-4.0-hadoop1/lastCompletedBuild/testReport/

Changes
[jtaylor] PHOENIX-1208 Check for existence of views doesn't take into account the fact that SYSTEM.CATALOG could be split across regions



git commit: PHOENIX-1208 Check for existence of views doesn't take into account the fact that SYSTEM.CATALOG could be split across regions

2014-08-28 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/3.0 e1ba4dec4 -> 87b223fcc


PHOENIX-1208 Check for existence of views doesn't take into account the fact 
that SYSTEM.CATALOG could be split across regions

Conflicts:

phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/87b223fc
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/87b223fc
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/87b223fc

Branch: refs/heads/3.0
Commit: 87b223fcc88e030f4e85677f49fc6004ca13c78e
Parents: e1ba4de
Author: James Taylor 
Authored: Thu Aug 28 22:29:50 2014 -0700
Committer: James Taylor 
Committed: Thu Aug 28 22:44:20 2014 -0700

--
 .../coprocessor/MetaDataEndpointImpl.java   | 67 +---
 .../phoenix/coprocessor/SuffixFilter.java   | 45 +
 2 files changed, 61 insertions(+), 51 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/87b223fc/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index 9cb3b89..b7343d1 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -17,8 +17,6 @@
  */
 package org.apache.phoenix.coprocessor;
 
-import static com.google.common.collect.Lists.newArrayList;
-import static org.apache.hadoop.hbase.filter.CompareFilter.CompareOp.EQUAL;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ARRAY_SIZE_BYTES;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_COUNT_BYTES;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_NAME_INDEX;
@@ -54,8 +52,6 @@ import static org.apache.phoenix.schema.PTableType.INDEX;
 import static org.apache.phoenix.util.SchemaUtil.getVarCharLength;
 import static org.apache.phoenix.util.SchemaUtil.getVarChars;
 
-import java.io.DataInput;
-import java.io.DataOutput;
 import java.io.IOException;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
@@ -68,14 +64,16 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.FilterBase;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
@@ -596,7 +594,7 @@ public class MetaDataEndpointImpl extends 
BaseEndpointCoprocessor implements Met
 }
 }
 
-private static final byte[] PHYSICAL_TABLE_BYTES = new byte[] 
{PTable.LinkType.PHYSICAL_TABLE.getSerializedValue()};
+protected static final byte[] PHYSICAL_TABLE_BYTES = new byte[] 
{PTable.LinkType.PHYSICAL_TABLE.getSerializedValue()};
 /**
  * @param tableName parent table's name
 * @return true if there exists a table that uses this table as its base 
table.
@@ -616,21 +614,25 @@ public class MetaDataEndpointImpl extends 
BaseEndpointCoprocessor implements Met
 scan.setStartRow(startRow);
 scan.setStopRow(stopRow);
 }
-SingleColumnValueFilter linkFilter = new 
SingleColumnValueFilter(TABLE_FAMILY_BYTES, LINK_TYPE_BYTES, EQUAL, 
PHYSICAL_TABLE_BYTES);
+SingleColumnValueFilter linkFilter = new 
SingleColumnValueFilter(TABLE_FAMILY_BYTES, LINK_TYPE_BYTES, CompareOp.EQUAL, 
PHYSICAL_TABLE_BYTES);
 linkFilter.setFilterIfMissing(true);
 byte[] suffix = ByteUtil.concat(QueryConstants.SEPARATOR_BYTE_ARRAY, 
SchemaUtil.getTableNameAsBytes(schemaName, tableName));
 SuffixFilter rowFilter = new SuffixFilter(suffix);
 Filter filter = new FilterList(linkFilter, rowFilter);
 scan.setFilter(filter);
 scan.addColumn(TABLE_FAMILY_BYTES, LINK_TYPE_BYTES);
-RegionScanner scanner = region.getScanner(scan);

git commit: PHOENIX-1208 Check for existence of views doesn't take into account the fact that SYSTEM.CATALOG could be split across regions

2014-08-28 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/master bd174c1c9 -> 867af78dd


PHOENIX-1208 Check for existence of views doesn't take into account the fact 
that SYSTEM.CATALOG could be split across regions


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/867af78d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/867af78d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/867af78d

Branch: refs/heads/master
Commit: 867af78dda90910f07dee0ac9c95cd4a7f50a749
Parents: bd174c1
Author: James Taylor 
Authored: Thu Aug 28 22:29:50 2014 -0700
Committer: James Taylor 
Committed: Thu Aug 28 22:31:06 2014 -0700

--
 .../coprocessor/MetaDataEndpointImpl.java   | 73 
 .../phoenix/coprocessor/SuffixFilter.java   | 50 ++
 2 files changed, 79 insertions(+), 44 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/867af78d/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index 2f7b34f..020a3c9 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -17,8 +17,6 @@
  */
 package org.apache.phoenix.coprocessor;
 
-import static com.google.common.collect.Lists.newArrayList;
-import static org.apache.hadoop.hbase.filter.CompareFilter.CompareOp.EQUAL;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ARRAY_SIZE_BYTES;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_COUNT_BYTES;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_NAME_INDEX;
@@ -73,15 +71,18 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.HTablePool;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.FilterBase;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
@@ -857,12 +858,13 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 locks.add(rowLock);
 }
 
-private static final byte[] PHYSICAL_TABLE_BYTES = new byte[] 
{PTable.LinkType.PHYSICAL_TABLE.getSerializedValue()};
+protected static final byte[] PHYSICAL_TABLE_BYTES = new byte[] 
{PTable.LinkType.PHYSICAL_TABLE.getSerializedValue()};
 /**
  * @param tableName parent table's name
 * @return true if there exists a table that uses this table as its base 
table.
  * TODO: should we pass a timestamp here?
  */
+@SuppressWarnings("deprecation")
 private boolean hasViews(HRegion region, byte[] tenantId, PTable table) 
throws IOException {
 byte[] schemaName = table.getSchemaName().getBytes();
 byte[] tableName = table.getTableName().getBytes();
@@ -877,21 +879,36 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 scan.setStartRow(startRow);
 scan.setStopRow(stopRow);
 }
-SingleColumnValueFilter linkFilter = new 
SingleColumnValueFilter(TABLE_FAMILY_BYTES, LINK_TYPE_BYTES, EQUAL, 
PHYSICAL_TABLE_BYTES);
+SingleColumnValueFilter linkFilter = new 
SingleColumnValueFilter(TABLE_FAMILY_BYTES, LINK_TYPE_BYTES, CompareOp.EQUAL, 
PHYSICAL_TABLE_BYTES);
 linkFilter.setFilterIfMissing(true);
 byte[] suffix = ByteUtil.concat(QueryConstants.SEPARATOR_BYTE_ARRAY, 
SchemaUtil.getTableNameAsBytes(schemaName, tableName));
 SuffixFilter rowFilter = new SuffixFilter(suffix);
 Filter filter = new FilterList(linkFilter, rowFilter);
 scan.setFilter(filter);
 scan.addColumn(TABLE_FAMILY_BYTES, LINK_TYPE_BYTES);
-RegionScanner scanner = region.getScanner(scan);
+

git commit: PHOENIX-1208 Check for existence of views doesn't take into account the fact that SYSTEM.CATALOG could be split across regions

2014-08-28 Thread jamestaylor
Repository: phoenix
Updated Branches:
  refs/heads/4.0 13903e055 -> 6fb2b22b9


PHOENIX-1208 Check for existence of views doesn't take into account the fact 
that SYSTEM.CATALOG could be split across regions


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/6fb2b22b
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/6fb2b22b
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/6fb2b22b

Branch: refs/heads/4.0
Commit: 6fb2b22b9a30ecf74b608cc1d6081b7889763f20
Parents: 13903e0
Author: James Taylor 
Authored: Thu Aug 28 22:29:50 2014 -0700
Committer: James Taylor 
Committed: Thu Aug 28 22:29:50 2014 -0700

--
 .../coprocessor/MetaDataEndpointImpl.java   | 73 
 .../phoenix/coprocessor/SuffixFilter.java   | 50 ++
 2 files changed, 79 insertions(+), 44 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/phoenix/blob/6fb2b22b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
--
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index 2f7b34f..020a3c9 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -17,8 +17,6 @@
  */
 package org.apache.phoenix.coprocessor;
 
-import static com.google.common.collect.Lists.newArrayList;
-import static org.apache.hadoop.hbase.filter.CompareFilter.CompareOp.EQUAL;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ARRAY_SIZE_BYTES;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_COUNT_BYTES;
 import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_NAME_INDEX;
@@ -73,15 +71,18 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.HTablePool;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.FilterBase;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
@@ -857,12 +858,13 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 locks.add(rowLock);
 }
 
-private static final byte[] PHYSICAL_TABLE_BYTES = new byte[] 
{PTable.LinkType.PHYSICAL_TABLE.getSerializedValue()};
+protected static final byte[] PHYSICAL_TABLE_BYTES = new byte[] 
{PTable.LinkType.PHYSICAL_TABLE.getSerializedValue()};
 /**
  * @param tableName parent table's name
 * @return true if there exists a table that uses this table as its base 
table.
  * TODO: should we pass a timestamp here?
  */
+@SuppressWarnings("deprecation")
 private boolean hasViews(HRegion region, byte[] tenantId, PTable table) 
throws IOException {
 byte[] schemaName = table.getSchemaName().getBytes();
 byte[] tableName = table.getTableName().getBytes();
@@ -877,21 +879,36 @@ public class MetaDataEndpointImpl extends 
MetaDataProtocol implements Coprocesso
 scan.setStartRow(startRow);
 scan.setStopRow(stopRow);
 }
-SingleColumnValueFilter linkFilter = new 
SingleColumnValueFilter(TABLE_FAMILY_BYTES, LINK_TYPE_BYTES, EQUAL, 
PHYSICAL_TABLE_BYTES);
+SingleColumnValueFilter linkFilter = new 
SingleColumnValueFilter(TABLE_FAMILY_BYTES, LINK_TYPE_BYTES, CompareOp.EQUAL, 
PHYSICAL_TABLE_BYTES);
 linkFilter.setFilterIfMissing(true);
 byte[] suffix = ByteUtil.concat(QueryConstants.SEPARATOR_BYTE_ARRAY, 
SchemaUtil.getTableNameAsBytes(schemaName, tableName));
 SuffixFilter rowFilter = new SuffixFilter(suffix);
 Filter filter = new FilterList(linkFilter, rowFilter);
 scan.setFilter(filter);
 scan.addColumn(TABLE_FAMILY_BYTES, LINK_TYPE_BYTES);
-RegionScanner scanner = region.getScanner(scan);
+//

Git Push Summary

2014-08-28 Thread mujtaba
Repository: phoenix
Updated Tags:  refs/tags/v3.1.0 [created] 56fb3c560


Git Push Summary

2014-08-28 Thread mujtaba
Repository: phoenix
Updated Tags:  refs/tags/v4.1.0 [created] d54043feb


svn commit: r6297 - in /release/phoenix: phoenix-3.1.0/ phoenix-3.1.0/bin/ phoenix-3.1.0/src/ phoenix-4.1.0/ phoenix-4.1.0/bin/ phoenix-4.1.0/src/

2014-08-28 Thread mujtaba
Author: mujtaba
Date: Fri Aug 29 00:50:36 2014
New Revision: 6297

Log:
Phoenix 3.1.0, 4.1.0

Added:
release/phoenix/phoenix-3.1.0/
release/phoenix/phoenix-3.1.0/bin/
release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz   (with props)
release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz.asc
release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz.md5
release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz.sha
release/phoenix/phoenix-3.1.0/src/
release/phoenix/phoenix-3.1.0/src/phoenix-3.1.0-src.tar.gz   (with props)
release/phoenix/phoenix-3.1.0/src/phoenix-3.1.0-src.tar.gz.asc
release/phoenix/phoenix-3.1.0/src/phoenix-3.1.0-src.tar.gz.md5
release/phoenix/phoenix-3.1.0/src/phoenix-3.1.0-src.tar.gz.sha
release/phoenix/phoenix-4.1.0/
release/phoenix/phoenix-4.1.0/bin/
release/phoenix/phoenix-4.1.0/bin/phoenix-4.1.0-bin.tar.gz   (with props)
release/phoenix/phoenix-4.1.0/bin/phoenix-4.1.0-bin.tar.gz.asc
release/phoenix/phoenix-4.1.0/bin/phoenix-4.1.0-bin.tar.gz.md5
release/phoenix/phoenix-4.1.0/bin/phoenix-4.1.0-bin.tar.gz.sha
release/phoenix/phoenix-4.1.0/src/
release/phoenix/phoenix-4.1.0/src/phoenix-4.1.0-src.tar.gz   (with props)
release/phoenix/phoenix-4.1.0/src/phoenix-4.1.0-src.tar.gz.asc
release/phoenix/phoenix-4.1.0/src/phoenix-4.1.0-src.tar.gz.md5
release/phoenix/phoenix-4.1.0/src/phoenix-4.1.0-src.tar.gz.sha

Added: release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz
==
Binary file - no diff available.

Propchange: release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz
--
svn:mime-type = application/octet-stream

Added: release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz.asc
==
--- release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz.asc (added)
+++ release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz.asc Fri Aug 29 
00:50:36 2014
@@ -0,0 +1,17 @@
+-BEGIN PGP SIGNATURE-
+Version: GnuPG v1.4.11 (GNU/Linux)
+
+iQIcBAABAgAGBQJT+4jrAAoJEDv8s5KUYReOFkoQAK4ioYtIwU+V0fFp4JJMiynn
+lXl5TVkuEDnn89FjooBDbPMEegIyg0ZxjGuDPiZxG8g2BwHYJAokF8Yx4TPUIpFt
+/iZjTH41u/OvqMOB9KFIxyzOP09BU7pWixrNmrkcfFD25FiDJXgvWMEi184+Dx1U
+709Y4c98K/DIwfglhEU1lb68huN3MlvUqrNZ5WXkjXhxSJ4QO0VzckJTX7QQWF1Q
+mWCp4NXUAgQTX66DHpCWcbgBuXHz9cWF0z3O+BlfdUWcwwyAgJ5vXS7WYO0l7hD5
+JWPkNXwO7g+7yfPEoSCRgCnGCraQ167ORv7Y3XTrxhNBnM6rl6Gg6irIBqnxAtS9
+GmqlJUXYcH3hS0VKf5rwwZhqMGbDCEm3M2U48ITSxwea39OwD1lsB78KFb/a92o/
+1lMG70H1XC6z49tSj43c/FM7i1d66FhEBswoAY0c/yJXI+qbuOFaKKrgNsllW20C
+6cvkVq0VFAw3BoKAc24/7VihJ0xVZgdMutnJR4BOzbEGVkGqZR0vGJj/PCRq49We
+MIVNg7JlFiRpWnwG3oNNVAyS7PvYtImX50Z/23+rPAt3EQlIbn/GKqwlNleWzVKb
+mea+8DWUM29l08/Tc43I1b7CQCy23x36oqq1Q01J5D5YKK3fTTCOyLyo+VaIolYk
+i1TIUlMGE3mJSa55isAY
+=40Yr
+-END PGP SIGNATURE-

Added: release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz.md5
==
--- release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz.md5 (added)
+++ release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz.md5 Fri Aug 29 
00:50:36 2014
@@ -0,0 +1 @@
+9daa97d750483d3ec30c3b4560704939 *phoenix-3.1.0-bin.tar.gz

Added: release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz.sha
==
--- release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz.sha (added)
+++ release/phoenix/phoenix-3.1.0/bin/phoenix-3.1.0-bin.tar.gz.sha Fri Aug 29 
00:50:36 2014
@@ -0,0 +1,2 @@
+554bf337a48812f4591f74199cad55a235181d52b8cf902cffe7d376afebabf5e3be00670d60f4d9b78e613fb8aec07f3c72c3fb2e3ad30ff6aa298ade989453
 *phoenix-3.1.0-bin.tar.gz
+308e6621db2ad39d586996eb3dd21013e8cbfed931c5c944f52b18aa2ce33e6b 
*phoenix-3.1.0-bin.tar.gz

Added: release/phoenix/phoenix-3.1.0/src/phoenix-3.1.0-src.tar.gz
==
Binary file - no diff available.

Propchange: release/phoenix/phoenix-3.1.0/src/phoenix-3.1.0-src.tar.gz
--
svn:mime-type = application/octet-stream

Added: release/phoenix/phoenix-3.1.0/src/phoenix-3.1.0-src.tar.gz.asc
==
--- release/phoenix/phoenix-3.1.0/src/phoenix-3.1.0-src.tar.gz.asc (added)
+++ release/phoenix/phoenix-3.1.0/src/phoenix-3.1.0-src.tar.gz.asc Fri Aug 29 
00:50:36 2014
@@ -0,0 +1,17 @@
+-BEGIN PGP SIGNATURE-
+Version: GnuPG v1.4.11 (GNU/Linux)
+
+iQIcBAABAgAGBQJT+4nBAAoJEDv8s5KUYReOf0kQAKPhCyGtZT8fa9Aluvi3ETdg
+DYh8DwIfddRp5SnufFxYuuYdMRzbdY7RrqYStXsnuFcans+jkqQgO6xoPTK2sdvC
+hmTmYSrbqJV1MrmkRos0j2yXUdFjzkxhWkZhB2FTNpaT26ykKcZSEle1RPfVYFqa
+6cJ9wATFLi5lQWg3TrDl5vnmRD4G9dYCs5qjGQn

Build failed in Jenkins: Phoenix | 4.0 | Hadoop2 #75

2014-08-28 Thread Apache Jenkins Server
See 

--
Started by timer
Building remotely on ubuntu3 (Ubuntu ubuntu) in workspace 

 > git rev-parse --is-inside-work-tree
Fetching changes from the remote Git repository
 > git config remote.origin.url git://git.apache.org/phoenix.git/
Fetching upstream changes from git://git.apache.org/phoenix.git/
 > git --version
 > git fetch --tags --progress git://git.apache.org/phoenix.git/ 
 > +refs/heads/*:refs/remotes/origin/*
 > git rev-parse origin/4.0^{commit}
Checking out Revision 13903e055f301226bf12c0e256cf4c4f767bd88c (origin/4.0)
 > git config core.sparsecheckout
 > git checkout -f 13903e055f301226bf12c0e256cf4c4f767bd88c
 > git rev-list 13903e055f301226bf12c0e256cf4c4f767bd88c
No emails were triggered.
FATAL: Unable to produce a script file
java.io.IOException: Failed to create a temp file on 

at hudson.FilePath.createTextTempFile(FilePath.java:1265)
at 
hudson.tasks.CommandInterpreter.createScriptFile(CommandInterpreter.java:144)
at hudson.tasks.CommandInterpreter.perform(CommandInterpreter.java:82)
at hudson.tasks.CommandInterpreter.perform(CommandInterpreter.java:66)
at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:804)
at hudson.model.Build$BuildExecution.build(Build.java:199)
at hudson.model.Build$BuildExecution.doRun(Build.java:160)
at 
hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:585)
at hudson.model.Run.execute(Run.java:1676)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:88)
at hudson.model.Executor.run(Executor.java:231)
Caused by: java.io.IOException: remote file operation failed: 
 at 
hudson.remoting.Channel@302712f4:ubuntu3
at hudson.FilePath.act(FilePath.java:910)
at hudson.FilePath.act(FilePath.java:887)
at hudson.FilePath.createTextTempFile(FilePath.java:1239)
... 12 more
Caused by: java.io.IOException: No space left on device
at java.io.FileOutputStream.writeBytes(Native Method)
at java.io.FileOutputStream.write(FileOutputStream.java:345)
at sun.nio.cs.StreamEncoder.writeBytes(StreamEncoder.java:221)
at sun.nio.cs.StreamEncoder.implClose(StreamEncoder.java:316)
at sun.nio.cs.StreamEncoder.close(StreamEncoder.java:149)
at java.io.OutputStreamWriter.close(OutputStreamWriter.java:233)
at hudson.FilePath$15.invoke(FilePath.java:1258)
at hudson.FilePath$15.invoke(FilePath.java:1239)
at hudson.FilePath$FileCallableWrapper.call(FilePath.java:2462)
at hudson.remoting.UserRequest.perform(UserRequest.java:118)
at hudson.remoting.UserRequest.perform(UserRequest.java:48)
at hudson.remoting.Request$2.run(Request.java:328)
at 
hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Build step 'Execute shell' marked build as failure
Archiving artifacts
Sending artifact delta relative to Phoenix | 4.0 | Hadoop2 #74
ERROR: Failed to archive artifacts: 
**/surefire-reports/*,**/failsafe-reports/*.xml,**/*.txt,**/*.jar,**/bin/*.sh,**/bin/*.sh,**/bin/*.xml,**/bin/*.properties
java.io.EOFException: Unexpected end of ZLIB input stream
at java.util.zip.InflaterInputStream.fill(InflaterInputStream.java:240)
at java.util.zip.InflaterInputStream.read(InflaterInputStream.java:158)
at java.util.zip.GZIPInputStream.read(GZIPInputStream.java:116)
at com.google.common.io.LimitInputStream.read(LimitInputStream.java:79)
at jsync.protocol.ChangeInputStream.read(ChangeInputStream.java:69)
at 
com.cloudbees.jenkins.plugins.jsync.archiver.MD5DigestingInputStream.read(MD5DigestingInputStream.java:39)
at com.google.common.io.LimitInputStream.read(LimitInputStream.java:79)
at java.io.FilterInputStream.read(FilterInputStream.java:107)
at com.google.common.io.ByteStreams.copy(ByteStreams.java:193)
at jsync.protocol.FileSequenceReader.read(FileSequenceReader.java:35)
at 
com.cloudbees.jenkins.plugins.jsync.archiver.JSyncArtifactManager.remoteSync(JSyncArtifactManager.java:134)
at 
com.cloudbees.jenkins.plugins.jsync.archiver.JSyncArtifactManager.archive(JSyncArtifactManager.java:67)
at hudson.tasks.Arti

svn commit: r1621236 - in /phoenix/site: publish/ publish/language/ source/src/site/markdown/

2014-08-28 Thread mujtaba
Author: mujtaba
Date: Fri Aug 29 00:31:36 2014
New Revision: 1621236

URL: http://svn.apache.org/r1621236
Log:
How to do a release - initial doc.

Added:
phoenix/site/publish/release.html
phoenix/site/source/src/site/markdown/release.md
Modified:
phoenix/site/publish/building.html
phoenix/site/publish/building_website.html
phoenix/site/publish/bulk_dataload.html
phoenix/site/publish/joins.html
phoenix/site/publish/language/datatypes.html
phoenix/site/publish/language/functions.html
phoenix/site/publish/language/index.html
phoenix/site/publish/mailing_list.html
phoenix/site/publish/phoenix_on_emr.html
phoenix/site/publish/salted.html
phoenix/site/publish/skip_scan.html
phoenix/site/publish/team.html
phoenix/site/publish/tracing.html
phoenix/site/publish/upgrade_from_2_2.html
phoenix/site/source/src/site/markdown/building.md

Modified: phoenix/site/publish/building.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/building.html?rev=1621236&r1=1621235&r2=1621236&view=diff
==
--- phoenix/site/publish/building.html (original)
+++ phoenix/site/publish/building.html Fri Aug 29 00:31:36 2014
@@ -1,7 +1,7 @@
 
 
 
 
@@ -182,7 +182,9 @@
  Branches 
  Phoenix 3.0 is running against hbase0.94+, Phoenix 4.0 is running against 
hbase0.98.1+ and Phoenix master branch is running against hbase trunk 
branch. 
   
- See also: Building Project Web 
Site 
+ See also:  
+ Building Project Web Site 
+ How to do a release 
 



Modified: phoenix/site/publish/building_website.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/building_website.html?rev=1621236&r1=1621235&r2=1621236&view=diff
==
--- phoenix/site/publish/building_website.html (original)
+++ phoenix/site/publish/building_website.html Fri Aug 29 00:31:36 2014
@@ -1,7 +1,7 @@
 
 
 
 

Modified: phoenix/site/publish/bulk_dataload.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/bulk_dataload.html?rev=1621236&r1=1621235&r2=1621236&view=diff
==
--- phoenix/site/publish/bulk_dataload.html (original)
+++ phoenix/site/publish/bulk_dataload.html Fri Aug 29 00:31:36 2014
@@ -1,7 +1,7 @@
 
 
 
 

Modified: phoenix/site/publish/joins.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/joins.html?rev=1621236&r1=1621235&r2=1621236&view=diff
==
--- phoenix/site/publish/joins.html (original)
+++ phoenix/site/publish/joins.html Fri Aug 29 00:31:36 2014
@@ -1,7 +1,7 @@
 
 
 
 

Modified: phoenix/site/publish/language/datatypes.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/language/datatypes.html?rev=1621236&r1=1621235&r2=1621236&view=diff
==
--- phoenix/site/publish/language/datatypes.html (original)
+++ phoenix/site/publish/language/datatypes.html Fri Aug 29 00:31:36 2014
@@ -1,7 +1,7 @@
 
 
 
 

Modified: phoenix/site/publish/language/functions.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/language/functions.html?rev=1621236&r1=1621235&r2=1621236&view=diff
==
--- phoenix/site/publish/language/functions.html (original)
+++ phoenix/site/publish/language/functions.html Fri Aug 29 00:31:36 2014
@@ -1,7 +1,7 @@
 
 
 
 

Modified: phoenix/site/publish/language/index.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/language/index.html?rev=1621236&r1=1621235&r2=1621236&view=diff
==
--- phoenix/site/publish/language/index.html (original)
+++ phoenix/site/publish/language/index.html Fri Aug 29 00:31:36 2014
@@ -1,7 +1,7 @@
 
 
 
 

Modified: phoenix/site/publish/mailing_list.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/mailing_list.html?rev=1621236&r1=1621235&r2=1621236&view=diff
==
--- phoenix/site/publish/mailing_list.html (original)
+++ phoenix/site/publish/mailing_list.html Fri Aug 29 00:31:36 2014
@@ -1,7 +1,7 @@
 
 
 
 

Modified: phoenix/site/publish/phoenix_on_emr.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/phoenix_on_emr.html?rev=1621236&r1=1621235&r2=1621236&view=diff
==
--- phoenix/site/publish/phoenix_on_emr.html (original)
+++ phoenix/site/publish/phoenix_on_emr.html Fri Aug 29 00:31:36 2014
@@ -1,7 +1,7 @@
 
 
 
 

Added: phoenix/site/publish/release.html
URL: 
http://svn.apache.org/viewvc/phoenix/site/publish/release.html?rev=1621236&view=auto
===