PHOENIX-3253 Make changes to tests to support method level parallelization
Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/976c97ac Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/976c97ac Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/976c97ac Branch: refs/heads/4.x-HBase-1.1 Commit: 976c97ac085a8b96e40e7da05740568b2c4757a7 Parents: 7f1ccc2 Author: James Taylor <jamestay...@apache.org> Authored: Sun Oct 2 12:47:34 2016 -0700 Committer: James Taylor <jamestay...@apache.org> Committed: Mon Oct 3 09:20:31 2016 -0700 ---------------------------------------------------------------------- .../apache/phoenix/end2end/AlterTableIT.java | 49 ---------- .../phoenix/end2end/FlappingAlterTableIT.java | 97 ++++++++++++++++++++ pom.xml | 28 +++--- 3 files changed, 114 insertions(+), 60 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/phoenix/blob/976c97ac/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java ---------------------------------------------------------------------- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java index 0125a63..48f4217 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableIT.java @@ -1083,30 +1083,6 @@ public class AlterTableIT extends ParallelStatsDisabledIT { } @Test - public void testAddColumnForNewColumnFamily() throws Exception { - Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES); - String ddl = "CREATE TABLE " + dataTableFullName + " (\n" - +"ID1 VARCHAR(15) NOT NULL,\n" - +"ID2 VARCHAR(15) NOT NULL,\n" - +"CREATED_DATE DATE,\n" - +"CREATION_TIME BIGINT,\n" - +"LAST_USED DATE,\n" - +"CONSTRAINT PK PRIMARY KEY (ID1, ID2)) SALT_BUCKETS = 8"; - Connection conn1 = DriverManager.getConnection(getUrl(), props); - 
conn1.createStatement().execute(ddl); - ddl = "ALTER TABLE " + dataTableFullName + " ADD CF.STRING VARCHAR"; - conn1.createStatement().execute(ddl); - try (HBaseAdmin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) { - HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName)).getColumnFamilies(); - assertEquals(2, columnFamilies.length); - assertEquals("0", columnFamilies[0].getNameAsString()); - assertEquals(HColumnDescriptor.DEFAULT_TTL, columnFamilies[0].getTimeToLive()); - assertEquals("CF", columnFamilies[1].getNameAsString()); - assertEquals(HColumnDescriptor.DEFAULT_TTL, columnFamilies[1].getTimeToLive()); - } - } - - @Test public void testSetHColumnProperties() throws Exception { Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES); String ddl = "CREATE TABLE " + dataTableFullName + " (\n" @@ -1414,31 +1390,6 @@ public class AlterTableIT extends ParallelStatsDisabledIT { } } - @Test - public void testNewColumnFamilyInheritsTTLOfEmptyCF() throws Exception { - Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES); - String ddl = "CREATE TABLE " + dataTableFullName + " (\n" - +"ID1 VARCHAR(15) NOT NULL,\n" - +"ID2 VARCHAR(15) NOT NULL,\n" - +"CREATED_DATE DATE,\n" - +"CREATION_TIME BIGINT,\n" - +"LAST_USED DATE,\n" - +"CONSTRAINT PK PRIMARY KEY (ID1, ID2)) SALT_BUCKETS = 8, TTL = 1000"; - Connection conn1 = DriverManager.getConnection(getUrl(), props); - conn1.createStatement().execute(ddl); - ddl = "ALTER TABLE " + dataTableFullName + " ADD CF.STRING VARCHAR"; - conn1.createStatement().execute(ddl); - try (HBaseAdmin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) { - HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName)); - HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies(); - assertEquals(2, columnFamilies.length); - assertEquals("0", columnFamilies[0].getNameAsString()); - assertEquals(1000, 
columnFamilies[0].getTimeToLive()); - assertEquals("CF", columnFamilies[1].getNameAsString()); - assertEquals(1000, columnFamilies[1].getTimeToLive()); - } - } - private static void assertImmutableRows(Connection conn, String fullTableName, boolean expectedValue) throws SQLException { PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class); assertEquals(expectedValue, pconn.getTable(new PTableKey(pconn.getTenantId(), fullTableName)).isImmutableRows()); http://git-wip-us.apache.org/repos/asf/phoenix/blob/976c97ac/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java ---------------------------------------------------------------------- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java new file mode 100644 index 0000000..a57088a --- /dev/null +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/FlappingAlterTableIT.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.phoenix.end2end; + +import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES; +import static org.junit.Assert.assertEquals; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.util.Properties; + +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.phoenix.jdbc.PhoenixConnection; +import org.apache.phoenix.util.PropertiesUtil; +import org.apache.phoenix.util.SchemaUtil; +import org.junit.Before; +import org.junit.Test; + +public class FlappingAlterTableIT extends ParallelStatsDisabledIT { + private String dataTableFullName; + + @Before + public void setupTableNames() throws Exception { + String schemaName = ""; + String dataTableName = generateUniqueName(); + dataTableFullName = SchemaUtil.getTableName(schemaName, dataTableName); + } + + @Test + public void testAddColumnForNewColumnFamily() throws Exception { + Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES); + String ddl = "CREATE TABLE " + dataTableFullName + " (\n" + +"ID1 VARCHAR(15) NOT NULL,\n" + +"ID2 VARCHAR(15) NOT NULL,\n" + +"CREATED_DATE DATE,\n" + +"CREATION_TIME BIGINT,\n" + +"LAST_USED DATE,\n" + +"CONSTRAINT PK PRIMARY KEY (ID1, ID2)) SALT_BUCKETS = 8"; + Connection conn1 = DriverManager.getConnection(getUrl(), props); + conn1.createStatement().execute(ddl); + ddl = "ALTER TABLE " + dataTableFullName + " ADD CF.STRING VARCHAR"; + conn1.createStatement().execute(ddl); + try (HBaseAdmin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) { + HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName)).getColumnFamilies(); + assertEquals(2, columnFamilies.length); + assertEquals("0", columnFamilies[0].getNameAsString()); + assertEquals(HColumnDescriptor.DEFAULT_TTL, columnFamilies[0].getTimeToLive()); + 
assertEquals("CF", columnFamilies[1].getNameAsString()); + assertEquals(HColumnDescriptor.DEFAULT_TTL, columnFamilies[1].getTimeToLive()); + } + } + + @Test + public void testNewColumnFamilyInheritsTTLOfEmptyCF() throws Exception { + Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES); + String ddl = "CREATE TABLE " + dataTableFullName + " (\n" + +"ID1 VARCHAR(15) NOT NULL,\n" + +"ID2 VARCHAR(15) NOT NULL,\n" + +"CREATED_DATE DATE,\n" + +"CREATION_TIME BIGINT,\n" + +"LAST_USED DATE,\n" + +"CONSTRAINT PK PRIMARY KEY (ID1, ID2)) SALT_BUCKETS = 8, TTL = 1000"; + Connection conn1 = DriverManager.getConnection(getUrl(), props); + conn1.createStatement().execute(ddl); + ddl = "ALTER TABLE " + dataTableFullName + " ADD CF.STRING VARCHAR"; + conn1.createStatement().execute(ddl); + try (HBaseAdmin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) { + HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName)); + HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies(); + assertEquals(2, columnFamilies.length); + assertEquals("0", columnFamilies[0].getNameAsString()); + assertEquals(1000, columnFamilies[0].getTimeToLive()); + assertEquals("CF", columnFamilies[1].getNameAsString()); + assertEquals(1000, columnFamilies[1].getTimeToLive()); + } + } + + +} http://git-wip-us.apache.org/repos/asf/phoenix/blob/976c97ac/pom.xml ---------------------------------------------------------------------- diff --git a/pom.xml b/pom.xml index 46dba49..400975d 100644 --- a/pom.xml +++ b/pom.xml @@ -235,9 +235,8 @@ <reuseForks>true</reuseForks> <runOrder>alphabetical</runOrder> <!--parallel>methods</parallel> - <threadCount>20</threadCount> - <enableAssertions>false</enableAssertions--> - <argLine>-Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops 
-XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC -XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68</argLine> + <threadCount>20</threadCount--> + <argLine>-Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC -XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68 -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine> <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile> <shutdown>kill</shutdown> <testSourceDirectory>${basedir}/src/it/java</testSourceDirectory> @@ -263,9 +262,16 @@ <reuseForks>true</reuseForks> <runOrder>alphabetical</runOrder> <!--parallel>methods</parallel> - <threadCount>20</threadCount> - <enableAssertions>false</enableAssertions--> - <argLine>-Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC -XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68</argLine> + <threadCount>20</threadCount--> + <!-- We're intermittently hitting this assertion: + Caused by: java.lang.AssertionError: we should never remove a different context + at org.apache.hadoop.hbase.regionserver.HRegion$RowLockContext.cleanUp(HRegion.java:5206) + at org.apache.hadoop.hbase.regionserver.HRegion$RowLockImpl.release(HRegion.java:5246) + at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2898) + at 
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2835) + at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:490) --> + <!--enableAssertions>false</enableAssertions--> + <argLine>-Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:NewRatio=4 -XX:SurvivorRatio=8 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC -XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68 -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine> <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile> <shutdown>kill</shutdown> <testSourceDirectory>${basedir}/src/it/java</testSourceDirectory> @@ -289,7 +295,7 @@ <forkCount>${numForkedIT}</forkCount> <runOrder>alphabetical</runOrder> <reuseForks>true</reuseForks> - <argLine>-enableassertions -Xmx2000m -XX:MaxPermSize=128m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}"</argLine> + <argLine>-enableassertions -Xmx2000m -XX:MaxPermSize=128m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine> <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile> <testSourceDirectory>${basedir}/src/it/java</testSourceDirectory> <groups>org.apache.phoenix.end2end.ClientManagedTimeTest</groups> @@ -313,7 +319,7 @@ <forkCount>${numForkedIT}</forkCount> <runOrder>alphabetical</runOrder> <reuseForks>true</reuseForks> - <argLine>-enableassertions -Xmx3072m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:NewRatio=4 
-XX:SurvivorRatio=8 -XX:+UseCompressedOops -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+DisableExplicitGC -XX:+UseCMSInitiatingOccupancyOnly -XX:+CMSClassUnloadingEnabled -XX:+CMSScavengeBeforeRemark -XX:CMSInitiatingOccupancyFraction=68</argLine> + <argLine>-enableassertions -Xmx2000m -XX:MaxPermSize=128m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine> <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile> <testSourceDirectory>${basedir}/src/it/java</testSourceDirectory> <groups>org.apache.phoenix.end2end.HBaseManagedTimeTest</groups> @@ -331,7 +337,7 @@ <forkCount>${numForkedIT}</forkCount> <runOrder>alphabetical</runOrder> <reuseForks>false</reuseForks> - <argLine>-enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}"</argLine> + <argLine>-enableassertions -Xmx2000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine> <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile> <testSourceDirectory>${basedir}/src/it/java</testSourceDirectory> <groups>org.apache.phoenix.end2end.NeedsOwnMiniClusterTest</groups> @@ -446,8 +452,8 @@ <configuration> <forkCount>${numForkedUT}</forkCount> <reuseForks>true</reuseForks> - <argLine>-enableassertions -Xmx2250m -XX:MaxPermSize=128m - -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}"</argLine> + <argLine>-enableassertions -Xmx2250m -XX:MaxPermSize=128m + -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError 
-XX:HeapDumpPath=./target/</argLine> <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile> <shutdown>kill</shutdown> </configuration>