Repository: phoenix
Updated Branches:
  refs/heads/master 2bbbcfdbe -> e910d8d24


PHOENIX-1725. Extract changes applicable to 4.0 branch (Jeffrey Zhong and Devaraj Das)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e910d8d2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e910d8d2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e910d8d2

Branch: refs/heads/master
Commit: e910d8d240df79c73c42a976addc6cfe6da204c2
Parents: 2bbbcfd
Author: Devaraj Das <d...@apache.org>
Authored: Fri Mar 13 10:16:24 2015 -0700
Committer: Devaraj Das <d...@apache.org>
Committed: Fri Mar 13 10:16:24 2015 -0700

----------------------------------------------------------------------
 .../wal/ReadWriteKeyValuesWithCodecIT.java      |  14 +-
 .../org/apache/phoenix/end2end/BaseViewIT.java  |   1 +
 .../end2end/QueryDatabaseMetaDataIT.java        | 216 +++++++++----------
 .../phoenix/end2end/StatsCollectorIT.java       |   1 +
 .../StatsCollectorWithSplitsAndMultiCFIT.java   |   1 +
 .../EndToEndCoveredColumnsIndexBuilderIT.java   |   6 +-
 .../regionserver/IndexSplitTransaction.java     |   2 +-
 .../coprocessor/MetaDataEndpointImpl.java       |   2 +-
 .../coprocessor/SequenceRegionObserver.java     |   2 +-
 .../hbase/index/balancer/IndexLoadBalancer.java |   2 +-
 .../covered/CoveredColumnsIndexBuilder.java     |   2 +-
 .../java/org/apache/phoenix/job/JobManager.java |   5 +-
 .../query/ConnectionQueryServicesImpl.java      |   1 +
 13 files changed, 126 insertions(+), 129 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e910d8d2/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
index b04f9f2..39eb871 100644
--- a/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
+++ b/phoenix-core/src/it/java/org/apache/hadoop/hbase/regionserver/wal/ReadWriteKeyValuesWithCodecIT.java
@@ -45,7 +45,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 /**
- * Simple test to read/write simple files via our custom {@link WALEditCodec} to ensure properly
+ * Simple test to read/write simple files via our custom {@link WALCellCodec} to ensure properly
  * encoding/decoding without going through a cluster.
  */
 public class ReadWriteKeyValuesWithCodecIT {
@@ -135,11 +135,11 @@ public class ReadWriteKeyValuesWithCodecIT {
   }
 
   
-  private void writeWALEdit(WALCellCodec codec, List<KeyValue> kvs, FSDataOutputStream out) throws IOException {
+  private void writeWALEdit(WALCellCodec codec, List<Cell> kvs, FSDataOutputStream out) throws IOException {
     out.writeInt(kvs.size());
     Codec.Encoder cellEncoder = codec.getEncoder(out);
     // We interleave the two lists for code simplicity
-    for (KeyValue kv : kvs) {
+    for (Cell kv : kvs) {
        cellEncoder.write(kv);
     }
   }
@@ -155,7 +155,7 @@ public class ReadWriteKeyValuesWithCodecIT {
     // write the edits out
     FSDataOutputStream out = fs.create(testFile);
     for (WALEdit edit : edits) {
-      writeWALEdit(codec, edit.getKeyValues(), out);
+      writeWALEdit(codec, edit.getCells(), out);
     }
     out.close();
 
@@ -174,9 +174,9 @@ public class ReadWriteKeyValuesWithCodecIT {
     for(int i=0; i< edits.size(); i++){
       WALEdit expected = edits.get(i);
       WALEdit found = read.get(i);
-      for(int j=0; j< expected.getKeyValues().size(); j++){
-        KeyValue fkv = found.getKeyValues().get(j);
-        KeyValue ekv = expected.getKeyValues().get(j);
+      for(int j=0; j< expected.getCells().size(); j++){
+        Cell fkv = found.getCells().get(j);
+        Cell ekv = expected.getCells().get(j);
         assertEquals("KV mismatch for edit! Expected: "+expected+", but found: "+found, ekv, fkv);
       }
     }

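Note on the hunks above: with the HBase 0.96+ API, WALEdit exposes its contents through the Cell interface (getCells()) rather than the removed getKeyValues(), and the codec under test is WALCellCodec. A minimal sketch of walking an edit through the Cell API; the HBase classes are standard, but the snippet itself is illustrative and not part of this commit:

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
    import org.apache.hadoop.hbase.util.Bytes;

    final class WALEditDump {
        // KeyValue still implements Cell, so existing edits decode unchanged;
        // callers simply stop downcasting to KeyValue.
        static void dump(WALEdit edit) {
            for (Cell cell : edit.getCells()) {
                System.out.println(Bytes.toString(CellUtil.cloneRow(cell)) + "/"
                        + Bytes.toString(CellUtil.cloneFamily(cell)) + ":"
                        + Bytes.toString(CellUtil.cloneQualifier(cell))
                        + " @ " + cell.getTimestamp());
            }
        }
    }
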
http://git-wip-us.apache.org/repos/asf/phoenix/blob/e910d8d2/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
index 19d011f..b9d7180 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BaseViewIT.java
@@ -45,6 +45,7 @@ public abstract class BaseViewIT extends BaseOwnClusterHBaseManagedTimeIT {
     public static void doSetup() throws Exception {
         Map<String,String> props = Maps.newHashMapWithExpectedSize(1);
         props.put(QueryServices.STATS_GUIDEPOST_WIDTH_BYTES_ATTRIB, Integer.toString(20));
+        props.put(QueryServices.QUEUE_SIZE_ATTRIB, Integer.toString(1024));
         setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
     }
     

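This hunk and the matching additions in StatsCollectorIT and StatsCollectorWithSplitsAndMultiCFIT below are the same one-liner: they pin the parallel-query work queue size (QueryServices.QUEUE_SIZE_ATTRIB) to 1024 before the test driver boots, since the setting is only read when the mini cluster starts. A sketch of the shape these doSetup() methods share, assuming the usual Phoenix BaseTest helpers:

    // Config must be staged before setUpTestDriver() starts the mini cluster;
    // changing QUEUE_SIZE_ATTRIB afterwards would have no effect.
    @BeforeClass
    public static void doSetup() throws Exception {
        Map<String, String> props = Maps.newHashMapWithExpectedSize(2);
        props.put(QueryServices.QUEUE_SIZE_ATTRIB, Integer.toString(1024));
        setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
    }
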
http://git-wip-us.apache.org/repos/asf/phoenix/blob/e910d8d2/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java
index 0df47b3..44086d7 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryDatabaseMetaDataIT.java
@@ -673,10 +673,7 @@ public class QueryDatabaseMetaDataIT extends BaseClientManagedTimeIT {
         try {
             admin.disableTable(htableName);
             admin.deleteTable(htableName);
-            admin.enableTable(htableName);
         } catch (org.apache.hadoop.hbase.TableNotFoundException e) {
-        } finally {
-            admin.close();
         }
         
         HTableDescriptor descriptor = new HTableDescriptor(htableName);
@@ -748,122 +745,115 @@ public class QueryDatabaseMetaDataIT extends BaseClientManagedTimeIT {
         } catch (ReadOnlyTableException e) {
             // expected to fail b/c table is read-only
         }
+
+        String upsert = "UPSERT INTO " + MDTEST_NAME + "(id,col1,col2) VALUES(?,?,?)";
+        ps = conn2.prepareStatement(upsert);
         try {
-            String upsert = "UPSERT INTO " + MDTEST_NAME + "(id,col1,col2) VALUES(?,?,?)";
-            ps = conn2.prepareStatement(upsert);
-            try {
-                ps.setString(1, Integer.toString(0));
-                ps.setInt(2, 1);
-                ps.setInt(3, 2);
-                ps.execute();
-                fail();
-            } catch (ReadOnlyTableException e) {
-                // expected to fail b/c table is read-only
-            }
-            conn2.createStatement().execute("ALTER VIEW " + MDTEST_NAME + " SET IMMUTABLE_ROWS=TRUE");
-            
-            HTableInterface htable = conn2.getQueryServices().getTable(SchemaUtil.getTableNameAsBytes(MDTEST_SCHEMA_NAME,MDTEST_NAME));
-            Put put = new Put(Bytes.toBytes("0"));
-            put.add(cfB, Bytes.toBytes("COL1"), ts+6, PInteger.INSTANCE.toBytes(1));
-            put.add(cfC, Bytes.toBytes("COL2"), ts+6, PLong.INSTANCE.toBytes(2));
-            htable.put(put);
-            conn2.close();
-            
-            props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 10));
-            Connection conn7 = DriverManager.getConnection(getUrl(), props);
-            // Should be ok b/c we've marked the view with IMMUTABLE_ROWS=true
-            conn7.createStatement().execute("CREATE INDEX idx ON " + MDTEST_NAME + "(B.COL1)");
-            String select = "SELECT col1 FROM " + MDTEST_NAME + " WHERE col2=?";
-            ps = conn7.prepareStatement(select);
-            ps.setInt(1, 2);
-            rs = ps.executeQuery();
-            assertTrue(rs.next());
-            assertEquals(1, rs.getInt(1));
-            assertFalse(rs.next());
-            
-            props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 12));
-            Connection conn75 = DriverManager.getConnection(getUrl(), props);
-            String dropTable = "DROP TABLE " + MDTEST_NAME ;
-            ps = conn75.prepareStatement(dropTable);
-            try {
-                ps.execute();
-                fail();
-            } catch (TableNotFoundException e) {
-                // expected to fail b/c it is a view
-            }
-    
-            String dropView = "DROP VIEW " + MDTEST_NAME ;
-            ps = conn75.prepareStatement(dropView);
+            ps.setString(1, Integer.toString(0));
+            ps.setInt(2, 1);
+            ps.setInt(3, 2);
             ps.execute();
-            conn75.close();
-            
-            props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 15));
-            Connection conn8 = DriverManager.getConnection(getUrl(), props);
-            createStmt = "create view " + MDTEST_NAME + 
-                    "   (id char(1) not null primary key,\n" + 
-                    "    b.col1 integer,\n" +
-                    "    \"c\".col2 bigint) IMMUTABLE_ROWS=true\n";
-            // should be ok to create a view with IMMUTABLE_ROWS = true
-            conn8.createStatement().execute(createStmt);
-            conn8.close();
-            
-            props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 20));
-            Connection conn9 = DriverManager.getConnection(getUrl(), props);
-            conn9.createStatement().execute("CREATE INDEX idx ON " + MDTEST_NAME + "(B.COL1)");
-            
-            props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 30));
-            Connection conn91 = DriverManager.getConnection(getUrl(), props);
-            ps = conn91.prepareStatement(dropView);
+            fail();
+        } catch (ReadOnlyTableException e) {
+            // expected to fail b/c table is read-only
+        }
+        conn2.createStatement().execute("ALTER VIEW " + MDTEST_NAME + " SET IMMUTABLE_ROWS=TRUE");
+
+        HTableInterface htable = conn2.getQueryServices().getTable(SchemaUtil.getTableNameAsBytes(MDTEST_SCHEMA_NAME,MDTEST_NAME));
+        Put put = new Put(Bytes.toBytes("0"));
+        put.add(cfB, Bytes.toBytes("COL1"), ts+6, PInteger.INSTANCE.toBytes(1));
+        put.add(cfC, Bytes.toBytes("COL2"), ts+6, PLong.INSTANCE.toBytes(2));
+        htable.put(put);
+        conn2.close();
+
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 10));
+        Connection conn7 = DriverManager.getConnection(getUrl(), props);
+        // Should be ok b/c we've marked the view with IMMUTABLE_ROWS=true
+        conn7.createStatement().execute("CREATE INDEX idx ON " + MDTEST_NAME + "(B.COL1)");
+        String select = "SELECT col1 FROM " + MDTEST_NAME + " WHERE col2=?";
+        ps = conn7.prepareStatement(select);
+        ps.setInt(1, 2);
+        rs = ps.executeQuery();
+        assertTrue(rs.next());
+        assertEquals(1, rs.getInt(1));
+        assertFalse(rs.next());
+
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 12));
+        Connection conn75 = DriverManager.getConnection(getUrl(), props);
+        String dropTable = "DROP TABLE " + MDTEST_NAME ;
+        ps = conn75.prepareStatement(dropTable);
+        try {
             ps.execute();
-            conn91.close();
-            
-            props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 35));
-            Connection conn92 = DriverManager.getConnection(getUrl(), props);
-            createStmt = "create view " + MDTEST_NAME + 
-                    "   (id char(1) not null primary key,\n" + 
-                    "    b.col1 integer,\n" +
-                    "    \"c\".col2 bigint) as\n" +
-                    " select * from " + MDTEST_NAME + 
-                    " where b.col1 = 1";
-            conn92.createStatement().execute(createStmt);
-            conn92.close();
-            
-            put = new Put(Bytes.toBytes("1"));
-            put.add(cfB, Bytes.toBytes("COL1"), ts+39, PInteger.INSTANCE.toBytes(3));
-            put.add(cfC, Bytes.toBytes("COL2"), ts+39, PLong.INSTANCE.toBytes(4));
-            htable.put(put);
+            fail();
+        } catch (TableNotFoundException e) {
+            // expected to fail b/c it is a view
+        }
 
-            props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 40));
-            Connection conn92a = DriverManager.getConnection(getUrl(), props);
-            rs = conn92a.createStatement().executeQuery("select count(*) from " + MDTEST_NAME);
-            assertTrue(rs.next());
-            assertEquals(1,rs.getInt(1));
-            conn92a.close();
+        String dropView = "DROP VIEW " + MDTEST_NAME ;
+        ps = conn75.prepareStatement(dropView);
+        ps.execute();
+        conn75.close();
 
-            props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 45));
-            Connection conn93 = DriverManager.getConnection(getUrl(), props);
-            try {
-                String alterView = "alter view " + MDTEST_NAME + " drop column b.col1";
-                conn93.createStatement().execute(alterView);
-                fail();
-            } catch (SQLException e) {
-                assertEquals(SQLExceptionCode.CANNOT_MUTATE_TABLE.getErrorCode(), e.getErrorCode());
-            }
-            conn93.close();
-            
-            props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 50));
-            Connection conn94 = DriverManager.getConnection(getUrl(), props);
-            String alterView = "alter view " + MDTEST_NAME + " drop column \"c\".col2";
-            conn94.createStatement().execute(alterView);
-            conn94.close();
-            
-        } finally {
-            HTableInterface htable = pconn.getQueryServices().getTable(SchemaUtil.getTableNameAsBytes(MDTEST_SCHEMA_NAME,MDTEST_NAME));
-            Delete delete1 = new Delete(Bytes.toBytes("0"));
-            Delete delete2 = new Delete(Bytes.toBytes("1"));
-            htable.batch(Arrays.asList(delete1, delete2));
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 15));
+        Connection conn8 = DriverManager.getConnection(getUrl(), props);
+        createStmt = "create view " + MDTEST_NAME +
+                "   (id char(1) not null primary key,\n" +
+                "    b.col1 integer,\n" +
+                "    \"c\".col2 bigint) IMMUTABLE_ROWS=true\n";
+        // should be ok to create a view with IMMUTABLE_ROWS = true
+        conn8.createStatement().execute(createStmt);
+        conn8.close();
+
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 20));
+        Connection conn9 = DriverManager.getConnection(getUrl(), props);
+        conn9.createStatement().execute("CREATE INDEX idx ON " + MDTEST_NAME + "(B.COL1)");
+
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 30));
+        Connection conn91 = DriverManager.getConnection(getUrl(), props);
+        ps = conn91.prepareStatement(dropView);
+        ps.execute();
+        conn91.close();
+
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 35));
+        Connection conn92 = DriverManager.getConnection(getUrl(), props);
+        createStmt = "create view " + MDTEST_NAME +
+                "   (id char(1) not null primary key,\n" +
+                "    b.col1 integer,\n" +
+                "    \"c\".col2 bigint) as\n" +
+                " select * from " + MDTEST_NAME +
+                " where b.col1 = 1";
+        conn92.createStatement().execute(createStmt);
+        conn92.close();
+
+        put = new Put(Bytes.toBytes("1"));
+        put.add(cfB, Bytes.toBytes("COL1"), ts+39, PInteger.INSTANCE.toBytes(3));
+        put.add(cfC, Bytes.toBytes("COL2"), ts+39, PLong.INSTANCE.toBytes(4));
+        htable.put(put);
+
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 40));
+        Connection conn92a = DriverManager.getConnection(getUrl(), props);
+        rs = conn92a.createStatement().executeQuery("select count(*) from " + MDTEST_NAME);
+        assertTrue(rs.next());
+        assertEquals(1,rs.getInt(1));
+        conn92a.close();
+
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 45));
+        Connection conn93 = DriverManager.getConnection(getUrl(), props);
+        try {
+            String alterView = "alter view " + MDTEST_NAME + " drop column b.col1";
+            conn93.createStatement().execute(alterView);
+            fail();
+        } catch (SQLException e) {
+            assertEquals(SQLExceptionCode.CANNOT_MUTATE_TABLE.getErrorCode(), e.getErrorCode());
        }
-        
+        conn93.close();
+
+        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 50));
+        Connection conn94 = DriverManager.getConnection(getUrl(), props);
+        String alterView = "alter view " + MDTEST_NAME + " drop column \"c\".col2";
+        conn94.createStatement().execute(alterView);
+        conn94.close();
+
     }
     
     @Test

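The net change in the hunks above is structural rather than behavioral: the body of the old outer try is flattened one level (hence the large +/- churn on otherwise unchanged statements), and the cleanup paths that touched the raw HBase table (admin.enableTable() and admin.close() in the first hunk, plus the finally block issuing Deletes for rows "0" and "1") are dropped. The connection-per-SCN pattern the test is built on stays the same; a minimal sketch, reusing names from the test (ts, MDTEST_NAME, and getUrl() come from the test harness):

    // Each Phoenix connection is pinned to an explicit SCN, so every statement
    // on it executes "as of" that timestamp.
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 10));
    Connection conn = DriverManager.getConnection(getUrl(), props);
    conn.createStatement().execute("CREATE INDEX idx ON " + MDTEST_NAME + "(B.COL1)");
    conn.close();
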
http://git-wip-us.apache.org/repos/asf/phoenix/blob/e910d8d2/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorIT.java
index 6d3eca5..6c392f5 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorIT.java
@@ -58,6 +58,7 @@ public class StatsCollectorIT extends StatsCollectorAbstractIT {
         // Must update config before starting server
         props.put(QueryServices.STATS_GUIDEPOST_WIDTH_BYTES_ATTRIB, Long.toString(20));
         props.put(QueryServices.EXPLAIN_CHUNK_COUNT_ATTRIB, Boolean.TRUE.toString());
+        props.put(QueryServices.QUEUE_SIZE_ATTRIB, Integer.toString(1024));
         setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
     }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e910d8d2/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorWithSplitsAndMultiCFIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorWithSplitsAndMultiCFIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorWithSplitsAndMultiCFIT.java
index 3c0c401..c34d598 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorWithSplitsAndMultiCFIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StatsCollectorWithSplitsAndMultiCFIT.java
@@ -59,6 +59,7 @@ public class StatsCollectorWithSplitsAndMultiCFIT extends StatsCollectorAbstract
         // Must update config before starting server
         props.put(QueryServices.STATS_GUIDEPOST_WIDTH_BYTES_ATTRIB, Long.toString(1000));
         props.put(QueryServices.EXPLAIN_CHUNK_COUNT_ATTRIB, Boolean.TRUE.toString());
+        props.put(QueryServices.QUEUE_SIZE_ATTRIB, Integer.toString(1024));
         setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
     }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e910d8d2/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java b/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
index 84c6827..880c1d5 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/hbase/index/covered/EndToEndCoveredColumnsIndexBuilderIT.java
@@ -43,8 +43,8 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdge;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.phoenix.util.EnvironmentEdge;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
 import org.apache.phoenix.hbase.index.IndexTestingUtils;
 import org.apache.phoenix.hbase.index.Indexer;
@@ -325,7 +325,7 @@ public class EndToEndCoveredColumnsIndexBuilderIT {
     EnvironmentEdge edge = new EnvironmentEdge() {
 
       @Override
-      public long currentTimeMillis() {
+      public long currentTime() {
         return ts;
       }
     };

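This file and the five main-source files below make the same substitution: org.apache.hadoop.hbase.util.EnvironmentEdgeManager gives way to Phoenix's own copy in org.apache.phoenix.util, whose EnvironmentEdge exposes currentTime() instead of the older currentTimeMillis(). A sketch of pinning the clock in a test, assuming Phoenix's manager mirrors HBase's injectEdge()/reset() API:

    // Freeze the timestamp observed by anything that consults
    // EnvironmentEdgeManager for the current time.
    final long ts = System.currentTimeMillis();
    EnvironmentEdgeManager.injectEdge(new EnvironmentEdge() {
        @Override
        public long currentTime() {
            return ts; // every caller sees this fixed instant
        }
    });
    try {
        // ... exercise the code under test at the pinned time ...
    } finally {
        EnvironmentEdgeManager.reset(); // restore the wall clock
    }
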
http://git-wip-us.apache.org/repos/asf/phoenix/blob/e910d8d2/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexSplitTransaction.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexSplitTransaction.java b/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexSplitTransaction.java
index c18521e..048506d 100644
--- a/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexSplitTransaction.java
+++ b/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexSplitTransaction.java
@@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.executor.EventType;
 import org.apache.hadoop.hbase.io.Reference;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CancelableProgressable;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.HasThread;
 import org.apache.hadoop.hbase.util.PairOfSameType;
 import org.apache.hadoop.hbase.zookeeper.ZKAssign;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e910d8d2/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index e234498..fab1ad0 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -91,7 +91,7 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegion.RowLock;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.phoenix.cache.GlobalCache;
 import org.apache.phoenix.coprocessor.generated.MetaDataProtos;
 import org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e910d8d2/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
index c1cf2df..7953933 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegion.RowLock;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.query.QueryConstants;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e910d8d2/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/balancer/IndexLoadBalancer.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/balancer/IndexLoadBalancer.java b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/balancer/IndexLoadBalancer.java
index 5bc973c..296ff95 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/balancer/IndexLoadBalancer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/balancer/IndexLoadBalancer.java
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.master.RegionState;
 import org.apache.hadoop.hbase.master.RegionStates;
 import org.apache.hadoop.hbase.master.balancer.StochasticLoadBalancer;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e910d8d2/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/CoveredColumnsIndexBuilder.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/CoveredColumnsIndexBuilder.java b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/CoveredColumnsIndexBuilder.java
index 6524fd4..075d4a9 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/CoveredColumnsIndexBuilder.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/CoveredColumnsIndexBuilder.java
@@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.phoenix.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Pair;
 
 import com.google.common.collect.Lists;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/e910d8d2/phoenix-core/src/main/java/org/apache/phoenix/job/JobManager.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/job/JobManager.java b/phoenix-core/src/main/java/org/apache/phoenix/job/JobManager.java
index 35c95c8..31ef742 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/job/JobManager.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/job/JobManager.java
@@ -55,7 +55,10 @@ public class JobManager<T> extends AbstractRoundRobinQueue<T> {
 
        @Override
     protected Object extractProducer(T o) {
-        return ((JobFutureTask)o).getJobId();
+        if( o instanceof  JobFutureTask){
+            return ((JobFutureTask)o).getJobId();
+        }
+        return o;
     }        
 
     public static interface JobRunnable<T> extends Runnable {

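The guard above fixes a latent ClassCastException: extractProducer() is invoked for everything that reaches the round-robin queue, but not every queued task is a JobFutureTask (for instance, work submitted to the executor without going through JobManager's own wrappers). Falling back to the task itself makes each such task its own producer group. The idiom in isolation, with JobFutureTask standing in for the real inner class:

    // Use the job id as the fairness key when available; otherwise let the
    // task act as its own key instead of failing on an unconditional downcast.
    protected Object extractProducer(T o) {
        if (o instanceof JobFutureTask) {
            return ((JobFutureTask) o).getJobId();
        }
        return o;
    }
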
http://git-wip-us.apache.org/repos/asf/phoenix/blob/e910d8d2/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index b149b92..d6f844c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -2118,6 +2118,7 @@ public class ConnectionQueryServicesImpl extends DelegateQueryServices implement
             Append append = sequence.createSequence(startWith, incrementBy, cacheSize, timestamp, minValue, maxValue, cycle);
             HTableInterface htable =
                     this.getTable(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
+            htable.setAutoFlush(true);
             try {
                 Result result = htable.append(append);
                 return sequence.createSequence(result, minValue, maxValue, cycle);

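The single added line disables client-side write buffering on the SYSTEM.SEQUENCE table handle, so mutations issued through that HTableInterface reach the region server synchronously. A hedged sketch of the surrounding pattern under the 0.98-era HTableInterface API (the finally/close() is assumed; the diff does not show the rest of the method):

    // Auto-flush sends writes on this handle immediately rather than letting
    // them accumulate in the client-side write buffer.
    HTableInterface htable = this.getTable(PhoenixDatabaseMetaData.SEQUENCE_FULLNAME_BYTES);
    htable.setAutoFlush(true);
    try {
        Result result = htable.append(append); // server-side atomic creation of the sequence row
        return sequence.createSequence(result, minValue, maxValue, cycle);
    } finally {
        htable.close();
    }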