Repository: falcon Updated Branches: refs/heads/master 13bc6b6a3 -> 4b0a920f6
http://git-wip-us.apache.org/repos/asf/falcon/blob/4b0a920f/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java b/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java index 71616e9..5faf5b5 100644 --- a/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java +++ b/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java @@ -28,6 +28,7 @@ import org.apache.hive.hcatalog.api.HCatCreateDBDesc; import org.apache.hive.hcatalog.api.HCatCreateTableDesc; import org.apache.hive.hcatalog.api.HCatPartition; import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; +import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.AfterMethod; @@ -179,9 +180,9 @@ public class HiveCatalogServiceIT { hiveCatalogService.isAlive(conf, "thrift://localhost:9999"); } - @Test (expectedExceptions = FalconException.class) + @Test public void testTableExistsNegative() throws Exception { - hiveCatalogService.tableExists(conf, METASTORE_URL, DATABASE_NAME, "blah"); + Assert.assertFalse(hiveCatalogService.tableExists(conf, METASTORE_URL, DATABASE_NAME, "blah")); } @Test @@ -247,6 +248,20 @@ public class HiveCatalogServiceIT { } @Test + public void testListPartititions() throws FalconException { + List<String> filters = new ArrayList<String>(); + filters.add("20130903"); + List<CatalogPartition> partitions = hiveCatalogService.listPartitions( + conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, filters); + Assert.assertEquals(partitions.size(), 2); + + filters.add("us"); + partitions = hiveCatalogService.listPartitions( + conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, filters); + Assert.assertEquals(partitions.size(), 1); + } + + @Test public void 
testGetPartitionsFullSpec() throws Exception { Map<String, String> partitionSpec = new HashMap<String, String>(); partitionSpec.put("ds", "20130902"); @@ -329,4 +344,46 @@ public class HiveCatalogServiceIT { {EXTERNAL_TABLE_NAME}, }; } + + @Test + public void testGetPartitionColumns() throws FalconException { + AbstractCatalogService catalogService = CatalogServiceFactory.getCatalogService(); + List<String> columns = catalogService.getPartitionColumns(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME); + Assert.assertEquals(columns, Arrays.asList("ds", "region")); + } + + @Test + public void testAddPartition() throws FalconException { + AbstractCatalogService catalogService = CatalogServiceFactory.getCatalogService(); + List<String> partitionValues = Arrays.asList("20130902", "us"); + String location = EXTERNAL_TABLE_LOCATION + "/20130902"; + catalogService.addPartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues, location); + CatalogPartition partition = + catalogService.getPartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues); + Assert.assertEquals(partition.getLocation(), location); + + try { + catalogService.addPartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues, location); + } catch (FalconException e) { + if (!(e.getCause() instanceof AlreadyExistsException)) { + Assert.fail("Expected FalconException(AlreadyExistsException)"); + } + } + } + + @Test + public void testUpdatePartition() throws FalconException { + AbstractCatalogService catalogService = CatalogServiceFactory.getCatalogService(); + List<String> partitionValues = Arrays.asList("20130902", "us"); + String location = EXTERNAL_TABLE_LOCATION + "/20130902"; + catalogService.addPartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues, location); + CatalogPartition partition = + catalogService.getPartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues); + Assert.assertEquals(partition.getLocation(), location); 
+ + String location2 = location + "updated"; + catalogService.updatePartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues, location2); + partition = catalogService.getPartition(conf, METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionValues); + Assert.assertEquals(partition.getLocation(), location2); + } } http://git-wip-us.apache.org/repos/asf/falcon/blob/4b0a920f/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java b/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java index 6982b65..2a14ae4 100644 --- a/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java +++ b/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java @@ -34,7 +34,6 @@ import org.apache.hive.hcatalog.api.HCatClient; import org.apache.hive.hcatalog.api.HCatPartition; import org.apache.hive.hcatalog.common.HCatException; import org.testng.Assert; -import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; @@ -89,6 +88,12 @@ public class TableStorageFeedEvictorIT { client = TestContext.getHCatClient(METASTORE_URL); + HiveTestUtils.dropTable(METASTORE_URL, DATABASE_NAME, EXTERNAL_TABLE_NAME); + HiveTestUtils.dropTable(METASTORE_URL, DATABASE_NAME, TABLE_NAME); + HiveTestUtils.dropTable(METASTORE_URL, DATABASE_NAME, MULTI_COL_DATED_EXTERNAL_TABLE_NAME); + HiveTestUtils.dropTable(METASTORE_URL, DATABASE_NAME, MULTI_COL_DATED_TABLE_NAME); + HiveTestUtils.dropDatabase(METASTORE_URL, DATABASE_NAME); + HiveTestUtils.createDatabase(METASTORE_URL, DATABASE_NAME); final List<String> partitionKeys = Arrays.asList("ds", "region"); HiveTestUtils.createTable(METASTORE_URL, DATABASE_NAME, TABLE_NAME, partitionKeys); @@ -101,15 
+106,6 @@ public class TableStorageFeedEvictorIT { multiColDatedPartitionKeys, MULTI_COL_DATED_EXTERNAL_TABLE_LOCATION); } - @AfterClass - public void close() throws Exception { - HiveTestUtils.dropTable(METASTORE_URL, DATABASE_NAME, EXTERNAL_TABLE_NAME); - HiveTestUtils.dropTable(METASTORE_URL, DATABASE_NAME, TABLE_NAME); - HiveTestUtils.dropTable(METASTORE_URL, DATABASE_NAME, MULTI_COL_DATED_EXTERNAL_TABLE_NAME); - HiveTestUtils.dropTable(METASTORE_URL, DATABASE_NAME, MULTI_COL_DATED_TABLE_NAME); - HiveTestUtils.dropDatabase(METASTORE_URL, DATABASE_NAME); - } - @DataProvider (name = "evictorTestDataProvider") private Object[][] createEvictorTestData() { return new Object[][] { http://git-wip-us.apache.org/repos/asf/falcon/blob/4b0a920f/webapp/src/test/java/org/apache/falcon/util/HiveTestUtils.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/falcon/util/HiveTestUtils.java b/webapp/src/test/java/org/apache/falcon/util/HiveTestUtils.java index 19274b9..28c3807 100644 --- a/webapp/src/test/java/org/apache/falcon/util/HiveTestUtils.java +++ b/webapp/src/test/java/org/apache/falcon/util/HiveTestUtils.java @@ -184,4 +184,13 @@ public final class HiveTestUtils { return TestContext.getHCatClient(metastoreUrl).getPartition(databaseName, tableName, partitionSpec); } + + public static List<HCatPartition> getPartitions(String metastoreUrl, String databaseName, + String tableName, String partitionKey, + String partitionValue) throws Exception { + Map<String, String> partitionSpec = new HashMap<String, String>(); + partitionSpec.put(partitionKey, partitionValue); + + return TestContext.getHCatClient(metastoreUrl).getPartitions(databaseName, tableName, partitionSpec); + } } http://git-wip-us.apache.org/repos/asf/falcon/blob/4b0a920f/webapp/src/test/java/org/apache/falcon/util/OozieTestUtils.java ---------------------------------------------------------------------- diff --git 
a/webapp/src/test/java/org/apache/falcon/util/OozieTestUtils.java b/webapp/src/test/java/org/apache/falcon/util/OozieTestUtils.java index e67fe2a..02d1011 100644 --- a/webapp/src/test/java/org/apache/falcon/util/OozieTestUtils.java +++ b/webapp/src/test/java/org/apache/falcon/util/OozieTestUtils.java @@ -26,6 +26,7 @@ import org.apache.falcon.logging.JobLogMover; import org.apache.falcon.resource.TestContext; import org.apache.falcon.workflow.WorkflowExecutionContext; import org.apache.falcon.workflow.engine.OozieClientFactory; +import org.apache.falcon.workflow.engine.OozieWorkflowEngine; import org.apache.hadoop.fs.Path; import org.apache.oozie.client.BundleJob; import org.apache.oozie.client.CoordinatorJob; @@ -82,15 +83,30 @@ return false; } - public static void waitForProcessWFtoStart(TestContext context) throws Exception { - waitForWorkflowStart(context, context.getProcessName()); + public static List<WorkflowJob> waitForProcessWFtoStart(TestContext context) throws Exception { + return waitForWorkflowStart(context, context.getProcessName()); } - public static void waitForWorkflowStart(TestContext context, String entityName) throws Exception { + public static void waitForInstanceToComplete(TestContext context, String jobId) throws Exception { + ProxyOozieClient ozClient = getOozieClient(context); + String lastStatus = null; + for (int i = 0; i < 50; i++) { + WorkflowJob job = ozClient.getJobInfo(jobId); + lastStatus = job.getStatus().name(); + if (OozieWorkflowEngine.WF_RERUN_PRECOND.contains(job.getStatus())) { + return; + } + System.out.println("Waiting for instance to complete"); + Thread.sleep(i * 1000); + } + throw new Exception("Instance " + jobId + " hasn't completed. 
Last known state " + lastStatus); + } + + public static List<WorkflowJob> waitForWorkflowStart(TestContext context, String entityName) throws Exception { for (int i = 0; i < 10; i++) { List<WorkflowJob> jobs = getRunningJobs(context, entityName); if (jobs != null && !jobs.isEmpty()) { - return; + return jobs; } System.out.println("Waiting for workflow to start"); http://git-wip-us.apache.org/repos/asf/falcon/blob/4b0a920f/webapp/src/test/resources/cluster-template.xml ---------------------------------------------------------------------- diff --git a/webapp/src/test/resources/cluster-template.xml b/webapp/src/test/resources/cluster-template.xml index 16b7c8c..2d54ece 100644 --- a/webapp/src/test/resources/cluster-template.xml +++ b/webapp/src/test/resources/cluster-template.xml @@ -27,7 +27,7 @@ <interface type="workflow" endpoint="http://localhost:41000/oozie/" version="3.1"/> <interface type="messaging" endpoint="tcp://localhost:61616?daemon=true" - version="5.1.6"/> + version="5.4.3"/> <interface type="registry" endpoint="thrift://localhost:49083" version="0.11.0"/> </interfaces> http://git-wip-us.apache.org/repos/asf/falcon/blob/4b0a920f/webapp/src/test/resources/feed-template1.xml ---------------------------------------------------------------------- diff --git a/webapp/src/test/resources/feed-template1.xml b/webapp/src/test/resources/feed-template1.xml index 456f7ce..f5cb6a0 100644 --- a/webapp/src/test/resources/feed-template1.xml +++ b/webapp/src/test/resources/feed-template1.xml @@ -16,8 +16,7 @@ See the License for the specific language governing permissions and limitations under the License. 
--> -<feed description="clicks log" name="##inputFeedName##" xmlns="uri:falcon:feed:0.1" - > +<feed description="clicks log" name="##inputFeedName##" xmlns="uri:falcon:feed:0.1"> <partitions> <partition name="timestamp"/> </partitions> http://git-wip-us.apache.org/repos/asf/falcon/blob/4b0a920f/webapp/src/test/resources/feed-template2.xml ---------------------------------------------------------------------- diff --git a/webapp/src/test/resources/feed-template2.xml b/webapp/src/test/resources/feed-template2.xml index d4901fa..dcfebce 100644 --- a/webapp/src/test/resources/feed-template2.xml +++ b/webapp/src/test/resources/feed-template2.xml @@ -16,8 +16,7 @@ See the License for the specific language governing permissions and limitations under the License. --> -<feed description="clicks log" name="##outputFeedName##" xmlns="uri:falcon:feed:0.1" - > +<feed description="clicks log" name="##outputFeedName##" xmlns="uri:falcon:feed:0.1"> <groups>output</groups> <frequency>days(1)</frequency> @@ -40,4 +39,8 @@ <ACL owner="##user##" group="group" permission="0x755"/> <schema location="/schema/clicks" provider="protobuf"/> + + <properties> + <property name="catalog.table" value="catalog:falcon_db:output_table#ds={YEAR}-{MONTH}-{DAY}"/> + </properties> </feed>
