http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSubmitTest.java ---------------------------------------------------------------------- diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSubmitTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSubmitTest.java deleted file mode 100644 index 53f3df8..0000000 --- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSubmitTest.java +++ /dev/null @@ -1,163 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.regression; - - -import org.apache.commons.httpclient.HttpStatus; -import org.apache.falcon.entity.v0.feed.LocationType; -import org.apache.falcon.regression.Entities.FeedMerlin; -import org.apache.falcon.regression.core.bundle.Bundle; -import org.apache.falcon.regression.core.helpers.ColoHelper; -import org.apache.falcon.regression.core.response.ServiceResponse; -import org.apache.falcon.regression.core.util.AssertUtil; -import org.apache.falcon.regression.core.util.BundleUtil; -import org.apache.falcon.regression.testHelper.BaseTestClass; -import org.testng.annotations.AfterMethod; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - - -/** - * Feed submission tests. - */ -@Test(groups = "embedded") -public class FeedSubmitTest extends BaseTestClass { - - private ColoHelper cluster = servers.get(0); - private String feed; - - @BeforeMethod(alwaysRun = true) - public void setUp() throws Exception { - bundles[0] = BundleUtil.readELBundle(); - bundles[0].generateUniqueBundle(this); - bundles[0] = new Bundle(bundles[0], cluster); - - //submit the cluster - ServiceResponse response = - prism.getClusterHelper().submitEntity(bundles[0].getClusters().get(0)); - AssertUtil.assertSucceeded(response); - feed = bundles[0].getInputFeedFromBundle(); - } - - @AfterMethod(alwaysRun = true) - public void tearDown() { - removeTestClassEntities(); - } - - /** - * Submit correctly adjusted feed. Response should reflect success. - * - * @throws Exception - */ - @Test(groups = {"singleCluster"}) - public void submitValidFeed() throws Exception { - ServiceResponse response = prism.getFeedHelper().submitEntity(feed); - AssertUtil.assertSucceeded(response); - } - - /** - * Submit and remove feed. Try to submit it again. Response should reflect success. 
- * - * @throws Exception - */ - @Test(groups = {"singleCluster"}) - public void submitValidFeedPostDeletion() throws Exception { - ServiceResponse response = prism.getFeedHelper().submitEntity(feed); - AssertUtil.assertSucceeded(response); - - response = prism.getFeedHelper().delete(feed); - AssertUtil.assertSucceeded(response); - - response = prism.getFeedHelper().submitEntity(feed); - AssertUtil.assertSucceeded(response); - } - - /** - * Submit feed. Get its definition. Try to submit it again. Should succeed. - * - * @throws Exception - */ - @Test(groups = {"singleCluster"}) - public void submitValidFeedPostGet() throws Exception { - ServiceResponse response = prism.getFeedHelper().submitEntity(feed); - AssertUtil.assertSucceeded(response); - - response = prism.getFeedHelper().getEntityDefinition(feed); - AssertUtil.assertSucceeded(response); - - response = prism.getFeedHelper().submitEntity(feed); - AssertUtil.assertSucceeded(response); - } - - /** - * Try to submit correctly adjusted feed twice. Should succeed. - * - * @throws Exception - */ - @Test(groups = {"singleCluster"}) - public void submitValidFeedTwice() throws Exception { - ServiceResponse response = prism.getFeedHelper().submitEntity(feed); - AssertUtil.assertSucceeded(response); - - response = prism.getFeedHelper().submitEntity(feed); - AssertUtil.assertSucceeded(response); - } - - /** - * Submit a feed with the path for location-data type empty. Feed submit should fail. - * * - * @throws Exception - */ - @Test(groups = {"singleCluster"}) - public void submitFeedWithEmptyDataPath() throws Exception { - FeedMerlin feedObj = new FeedMerlin(feed); - feedObj.setLocation(LocationType.DATA, ""); - ServiceResponse response = prism.getFeedHelper().submitEntity(feedObj.toString()); - AssertUtil.assertFailedWithStatus(response, HttpStatus.SC_BAD_REQUEST, - "Can not create a Path from an empty string"); - } - - /** - * Submit a feed no location type stats. Feed submit should succeed. - * - * @throws Exception - */ - - @Test(groups = {"singleCluster"}) - public void submitFeedWithNoStatsPath() throws Exception { - FeedMerlin feedObj = new FeedMerlin(feed); - feedObj.getLocations().getLocations().set(1, null); - ServiceResponse response = prism.getFeedHelper().submitEntity(feedObj.toString()); - AssertUtil.assertSucceeded(response); - } - - /** - * Submit a feed with no location type data. Feed submit should fail. - * * - * @throws Exception - */ - @Test(groups = {"singleCluster"}) - public void submitFeedWithNoDataPath() throws Exception { - FeedMerlin feedObj = new FeedMerlin(feed); - feedObj.getLocations().getLocations().set(0, null); - ServiceResponse response = prism.getFeedHelper().submitEntity(feedObj.toString()); - AssertUtil.assertFailedWithStatus(response, HttpStatus.SC_BAD_REQUEST, - "FileSystem based feed but it doesn't contain location type - data"); - } -}
http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSuspendTest.java ---------------------------------------------------------------------- diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSuspendTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSuspendTest.java deleted file mode 100644 index 5217818..0000000 --- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedSuspendTest.java +++ /dev/null @@ -1,141 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.regression; - -import org.apache.falcon.entity.v0.EntityType; -import org.apache.falcon.regression.core.bundle.Bundle; -import org.apache.falcon.regression.core.helpers.ColoHelper; -import org.apache.falcon.regression.core.response.ServiceResponse; -import org.apache.falcon.regression.core.util.AssertUtil; -import org.apache.falcon.regression.core.util.BundleUtil; -import org.apache.falcon.regression.testHelper.BaseTestClass; -import org.apache.oozie.client.Job; -import org.apache.oozie.client.OozieClient; -import org.testng.annotations.AfterMethod; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - - -/** - * Feed suspend tests. - */ -@Test(groups = "embedded") -public class FeedSuspendTest extends BaseTestClass { - - private ColoHelper cluster = servers.get(0); - private OozieClient clusterOC = serverOC.get(0); - private String feed; - - @BeforeMethod(alwaysRun = true) - public void setUp() throws Exception { - bundles[0] = BundleUtil.readELBundle(); - bundles[0].generateUniqueBundle(this); - bundles[0] = new Bundle(bundles[0], cluster); - - //submit the cluster - ServiceResponse response = - prism.getClusterHelper().submitEntity(bundles[0].getClusters().get(0)); - AssertUtil.assertSucceeded(response); - - feed = bundles[0].getInputFeedFromBundle(); - } - - @AfterMethod(alwaysRun = true) - public void tearDown() { - removeTestClassEntities(); - } - - /** - * Schedule feed, suspend it. Check that web response reflects success and feed status is - * "suspended". - * - * @throws Exception - */ - @Test(groups = {"singleCluster"}) - public void suspendScheduledFeed() throws Exception { - ServiceResponse response = prism.getFeedHelper().submitAndSchedule(feed); - AssertUtil.assertSucceeded(response); - - response = prism.getFeedHelper().suspend(feed); - AssertUtil.assertSucceeded(response); - AssertUtil.checkStatus(clusterOC, EntityType.FEED, feed, Job.Status.SUSPENDED); - } - - /** - * Try to suspend running feed twice. Response should reflect success, - * feed status should be suspended. 
- * - * @throws Exception - */ - @Test(groups = {"singleCluster"}) - public void suspendAlreadySuspendedFeed() throws Exception { - ServiceResponse response = prism.getFeedHelper().submitAndSchedule(feed); - AssertUtil.assertSucceeded(response); - - response = prism.getFeedHelper().suspend(feed); - AssertUtil.assertSucceeded(response); - AssertUtil.checkStatus(clusterOC, EntityType.FEED, feed, Job.Status.SUSPENDED); - response = prism.getFeedHelper().suspend(feed); - - AssertUtil.assertSucceeded(response); - AssertUtil.checkStatus(clusterOC, EntityType.FEED, feed, Job.Status.SUSPENDED); - } - - /** - * Remove feed. Attempt to suspend it should fail. - * - * @throws Exception - */ - @Test(groups = {"singleCluster"}) - public void suspendDeletedFeed() throws Exception { - ServiceResponse response = prism.getFeedHelper().submitAndSchedule(feed); - AssertUtil.assertSucceeded(response); - - response = prism.getFeedHelper().delete(feed); - AssertUtil.assertSucceeded(response); - - response = prism.getFeedHelper().suspend(feed); - AssertUtil.assertFailed(response); - } - - /** - * Attempt to suspend non existent feed should fail. - * - * @throws Exception - */ - @Test(groups = {"singleCluster"}) - public void suspendNonExistentFeed() throws Exception { - ServiceResponse response = prism.getFeedHelper().suspend(feed); - AssertUtil.assertFailed(response); - } - - /** - * Attempt to suspend non scheduled feed should fail. - * - * @throws Exception - */ - @Test(groups = {"singleCluster"}) - public void suspendSubmittedFeed() throws Exception { - ServiceResponse response = prism.getFeedHelper().submitEntity(feed); - AssertUtil.assertSucceeded(response); - - response = prism.getFeedHelper().suspend(feed); - AssertUtil.assertFailed(response); - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceParamTest.java ---------------------------------------------------------------------- diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceParamTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceParamTest.java deleted file mode 100644 index 33808bf..0000000 --- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceParamTest.java +++ /dev/null @@ -1,165 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.falcon.regression; - -import org.apache.falcon.entity.v0.EntityType; -import org.apache.falcon.entity.v0.feed.ClusterType; -import org.apache.falcon.regression.core.bundle.Bundle; -import org.apache.falcon.regression.core.helpers.ColoHelper; -import org.apache.falcon.regression.core.util.BundleUtil; -import org.apache.falcon.regression.core.util.HadoopUtil; -import org.apache.falcon.regression.core.util.InstanceUtil; -import org.apache.falcon.regression.core.util.OSUtil; -import org.apache.falcon.regression.core.util.OozieUtil; -import org.apache.falcon.regression.core.util.TimeUtil; -import org.apache.falcon.regression.core.util.Util; -import org.apache.falcon.regression.testHelper.BaseTestClass; -import org.apache.falcon.resource.InstancesResult; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.security.authentication.client.AuthenticationException; -import org.apache.log4j.Logger; -import org.apache.oozie.client.CoordinatorAction; -import org.apache.oozie.client.OozieClient; -import org.apache.oozie.client.OozieClientException; -import org.testng.annotations.AfterMethod; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import javax.xml.bind.JAXBException; -import java.io.IOException; -import java.net.URISyntaxException; - -/** - * tests for instance option params. - */ -@Test(groups = "embedded") -public class InstanceParamTest extends BaseTestClass { - - /** - * test cases for https://issues.apache.org/jira/browse/FALCON-263. - */ - - private String baseTestHDFSDir = cleanAndGetTestDir(); - private String feedInputPath = baseTestHDFSDir + "/testInputData" + MINUTE_DATE_PATTERN; - private String feedOutputPath = baseTestHDFSDir + "/testOutputData" + MINUTE_DATE_PATTERN; - private String aggregateWorkflowDir = baseTestHDFSDir + "/aggregator"; - private String startTime; - private String endTime; - private ColoHelper cluster1 = servers.get(0); - private OozieClient cluster1OC = serverOC.get(0); - private Bundle processBundle; - private static final Logger LOGGER = Logger.getLogger(InstanceParamTest.class); - private String processName; - - @BeforeClass(alwaysRun = true) - public void createTestData() throws Exception { - uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE); - startTime = TimeUtil.get20roundedTime(TimeUtil.getTimeWrtSystemTime(-20)); - endTime = TimeUtil.getTimeWrtSystemTime(60); - } - - @BeforeMethod(alwaysRun = true) - public void setup() throws Exception { - processBundle = new Bundle(BundleUtil.readELBundle(), cluster1); - processBundle.generateUniqueBundle(this); - processBundle.setInputFeedDataPath(feedInputPath); - processBundle.setOutputFeedLocationData(feedOutputPath); - processBundle.setProcessWorkflow(aggregateWorkflowDir); - for (int i = 0; i < 3; i++) { - bundles[i] = new Bundle(BundleUtil.readELBundle(), servers.get(i)); - bundles[i].generateUniqueBundle(this); - bundles[i].setInputFeedDataPath(feedInputPath); - bundles[i].setOutputFeedLocationData(feedOutputPath); - bundles[i].setProcessWorkflow(aggregateWorkflowDir); - } - processName = processBundle.getProcessName(); - } - - /** - * Schedule process. Get params of waiting instance. 
- */ - @Test(timeOut = 1200000, enabled = false) - public void getParamsValidRequestInstanceWaiting() - throws URISyntaxException, JAXBException, AuthenticationException, IOException, - OozieClientException, InterruptedException { - processBundle.setProcessValidity(startTime, endTime); - processBundle.addClusterToBundle(bundles[1].getClusters().get(0), - ClusterType.SOURCE, null, null); - processBundle.addClusterToBundle(bundles[2].getClusters().get(0), - ClusterType.SOURCE, null, null); - processBundle.submitFeedsScheduleProcess(prism); - InstanceUtil.waitTillInstancesAreCreated(cluster1OC, processBundle.getProcessData(), 0); - InstancesResult r = prism.getProcessHelper().getInstanceParams(processName, - "?start=" + startTime); - r.getMessage(); - } - - /** - * Schedule process. Wait till instance succeeded. Get its params. - */ - @Test(timeOut = 1200000, enabled = true) - public void getParamsValidRequestInstanceSucceeded() - throws URISyntaxException, JAXBException, AuthenticationException, IOException, - OozieClientException, InterruptedException { - processBundle.setProcessValidity(startTime, endTime); - processBundle.addClusterToBundle(bundles[1].getClusters().get(0), - ClusterType.SOURCE, null, null); - processBundle.addClusterToBundle(bundles[2].getClusters().get(0), - ClusterType.SOURCE, null, null); - processBundle.submitFeedsScheduleProcess(prism); - InstanceUtil.waitTillInstancesAreCreated(cluster1OC, processBundle.getProcessData(), 0); - OozieUtil.createMissingDependencies(cluster1, EntityType.PROCESS, processName, 0); - InstanceUtil.waitTillInstanceReachState(cluster1OC, processName, 1, - CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS, 10); - InstancesResult r = prism.getProcessHelper() - .getInstanceParams(processName, "?start=" + startTime); - LOGGER.info(r.getMessage()); - } - - /** - * Schedule process. Wait till instance got killed. Get its params. 
- */ - @Test(timeOut = 1200000, enabled = false) - public void getParamsValidRequestInstanceKilled() - throws URISyntaxException, JAXBException, AuthenticationException, IOException, - OozieClientException, InterruptedException { - processBundle.setProcessValidity(startTime, endTime); - processBundle.addClusterToBundle(bundles[1].getClusters().get(0), - ClusterType.SOURCE, null, null); - processBundle.addClusterToBundle(bundles[2].getClusters().get(0), - ClusterType.SOURCE, null, null); - processBundle.submitFeedsScheduleProcess(prism); - InstanceUtil.waitTillInstancesAreCreated(cluster1OC, processBundle.getProcessData(), 0); - OozieUtil.createMissingDependencies(cluster1, EntityType.PROCESS, processName, 0); - InstanceUtil.waitTillInstanceReachState(cluster1OC, processName, 0, - CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS); //change according to test case - InstancesResult r = prism.getProcessHelper() - .getInstanceParams(processName, "?start=" + startTime); - r.getMessage(); - } - - @AfterMethod(alwaysRun = true) - public void tearDown() throws IOException { - removeTestClassEntities(); - for (FileSystem fs : serverFS) { - HadoopUtil.deleteDirIfExists(Util.getPathPrefix(feedInputPath), fs); - } - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java ---------------------------------------------------------------------- diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java deleted file mode 100644 index 137491d..0000000 --- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java +++ /dev/null @@ -1,267 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.falcon.regression; - -import org.apache.falcon.regression.Entities.FeedMerlin; -import org.apache.falcon.regression.core.bundle.Bundle; -import org.apache.falcon.entity.v0.EntityType; -import org.apache.falcon.entity.v0.feed.ActionType; -import org.apache.falcon.entity.v0.feed.ClusterType; -import org.apache.falcon.regression.core.helpers.ColoHelper; -import org.apache.falcon.regression.core.util.BundleUtil; -import org.apache.falcon.regression.core.util.HadoopUtil; -import org.apache.falcon.regression.core.util.InstanceUtil; -import org.apache.falcon.regression.core.util.OSUtil; -import org.apache.falcon.regression.core.util.TimeUtil; -import org.apache.falcon.regression.core.util.Util; -import org.apache.falcon.regression.testHelper.BaseTestClass; -import org.apache.falcon.resource.InstancesSummaryResult; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.security.authentication.client.AuthenticationException; -import org.apache.oozie.client.CoordinatorAction.Status; -import org.apache.oozie.client.OozieClientException; -import org.testng.annotations.AfterMethod; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import javax.xml.bind.JAXBException; -import java.io.IOException; -import java.net.URISyntaxException; -import java.text.ParseException; -import java.util.List; - -/** This test currently provide minimum verification. More detailed test should be added: - 1. process : test summary single cluster few instance some future some past - 2. process : test multiple cluster, full past on one cluster, full future on one cluster, - half future / past on third one - 3. feed : same as test 1 for feed - 4. feed : same as test 2 for feed - */ -@Test(groups = "embedded") -public class InstanceSummaryTest extends BaseTestClass { - - private String baseTestHDFSDir = cleanAndGetTestDir(); - private String feedInputPath = baseTestHDFSDir + "/testInputData" + MINUTE_DATE_PATTERN; - private String aggregateWorkflowDir = baseTestHDFSDir + "/aggregator"; - private String startTime; - private String endTime; - private ColoHelper cluster3 = servers.get(2); - private Bundle processBundle; - private String processName; - - @BeforeClass(alwaysRun = true) - public void createTestData() throws Exception { - uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE); - startTime = TimeUtil.get20roundedTime(TimeUtil.getTimeWrtSystemTime(-20)); - endTime = TimeUtil.getTimeWrtSystemTime(60); - String startTimeData = TimeUtil.addMinsToTime(startTime, -100); - List<String> dataDates = TimeUtil.getMinuteDatesOnEitherSide(startTimeData, endTime, 20); - for (FileSystem fs : serverFS) { - HadoopUtil.deleteDirIfExists(Util.getPathPrefix(feedInputPath), fs); - HadoopUtil.flattenAndPutDataInFolder(fs, OSUtil.NORMAL_INPUT, - Util.getPathPrefix(feedInputPath), dataDates); - } - } - - @BeforeMethod(alwaysRun = true) - public void setup() throws Exception { - processBundle = new Bundle(BundleUtil.readELBundle(), cluster3); - processBundle.generateUniqueBundle(this); - processBundle.setInputFeedDataPath(feedInputPath); - processBundle.setOutputFeedLocationData(baseTestHDFSDir + "/output" + MINUTE_DATE_PATTERN); - processBundle.setProcessWorkflow(aggregateWorkflowDir); - - for (int i = 0; i < 3; i++) { - bundles[i] = new Bundle(BundleUtil.readELBundle(), servers.get(i)); - bundles[i].generateUniqueBundle(this); - bundles[i].setProcessWorkflow(aggregateWorkflowDir); - } - processName = 
Util.readEntityName(processBundle.getProcessData()); - } - - /** - * Schedule single-cluster process. Get its instances summary. - */ - @Test(enabled = true, timeOut = 1200000) - public void testSummarySingleClusterProcess() - throws URISyntaxException, JAXBException, IOException, ParseException, - OozieClientException, AuthenticationException, InterruptedException { - processBundle.setProcessValidity(startTime, endTime); - processBundle.submitFeedsScheduleProcess(prism); - InstanceUtil.waitTillInstancesAreCreated(serverOC.get(2), processBundle.getProcessData(), 0); - - // start only at start time - InstancesSummaryResult r = prism.getProcessHelper() - .getInstanceSummary(processName, "?start=" + startTime); - InstanceUtil.waitTillInstanceReachState(serverOC.get(2), processName, 2, - Status.SUCCEEDED, EntityType.PROCESS); - - //AssertUtil.assertSucceeded(r); - - //start only before process start - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + TimeUtil.addMinsToTime(startTime, -100)); - //AssertUtil.assertFailed(r,"response should have failed"); - - //start only after process end - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + TimeUtil.addMinsToTime(startTime, 120)); - - - //start only at mid specific instance - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + TimeUtil.addMinsToTime(startTime, 10)); - - //start only in between 2 instance - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + TimeUtil.addMinsToTime(startTime, 7)); - - //start and end at start and end - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + startTime + "&end=" + endTime); - - //start in between and end at end - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + TimeUtil.addMinsToTime(startTime, 14) + "&end=" + endTime); - - //start at start and end between - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(endTime, -20)); - - // start and end in between - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + TimeUtil.addMinsToTime(startTime, 20) - + "&end=" + TimeUtil.addMinsToTime(endTime, -13)); - - //start before start with end in between - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + TimeUtil.addMinsToTime(startTime, -100) - + "&end=" + TimeUtil.addMinsToTime(endTime, -37)); - - //start in between and end after end - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + TimeUtil.addMinsToTime(startTime, 60) - + "&end=" + TimeUtil.addMinsToTime(endTime, 100)); - - // both start end out od range - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + TimeUtil.addMinsToTime(startTime, -100) - + "&end=" + TimeUtil.addMinsToTime(endTime, 100)); - - // end only - r = prism.getProcessHelper().getInstanceSummary(processName, - "?end=" + TimeUtil.addMinsToTime(endTime, -30)); - } - - /** - * Adjust multi-cluster process. Submit and schedule it. Get its instances summary. 
- */ - @Test(enabled = true, timeOut = 1200000) - public void testSummaryMultiClusterProcess() throws JAXBException, - ParseException, IOException, URISyntaxException, AuthenticationException, - InterruptedException { - processBundle.setProcessValidity(startTime, endTime); - processBundle.addClusterToBundle(bundles[1].getClusters().get(0), - ClusterType.SOURCE, null, null); - processBundle.addClusterToBundle(bundles[2].getClusters().get(0), - ClusterType.SOURCE, null, null); - processBundle.submitFeedsScheduleProcess(prism); - InstancesSummaryResult r = prism.getProcessHelper() - .getInstanceSummary(processName, "?start=" + startTime); - - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + startTime + "&end=" + endTime); - - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + startTime + "&end=" + endTime); - - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + startTime + "&end=" + endTime); - - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + startTime + "&end=" + endTime); - - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + startTime + "&end=" + endTime); - - r = prism.getProcessHelper().getInstanceSummary(processName, - "?start=" + startTime + "&end=" + endTime); - } - - /** - * Adjust multi-cluster feed. Submit and schedule it. Get its instances summary. - */ - @Test(enabled = true, timeOut = 1200000) - public void testSummaryMultiClusterFeed() throws JAXBException, ParseException, IOException, - URISyntaxException, OozieClientException, AuthenticationException, - InterruptedException { - - //create desired feed - String feed = bundles[0].getDataSets().get(0); - - //cluster_1 is target, cluster_2 is source and cluster_3 is neutral - feed = FeedMerlin.fromString(feed).clearFeedClusters().toString(); - - feed = FeedMerlin.fromString(feed).addFeedCluster( - new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0))) - .withRetention("days(100000)", ActionType.DELETE) - .withValidity(startTime, "2099-10-01T12:10Z") - .build()).toString(); - - feed = FeedMerlin.fromString(feed).addFeedCluster( - new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0))) - .withRetention("days(100000)", ActionType.DELETE) - .withValidity(startTime, "2099-10-01T12:25Z") - .withClusterType(ClusterType.TARGET) - .withDataLocation(feedInputPath) - .build()).toString(); - - feed = FeedMerlin.fromString(feed).addFeedCluster( - new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0))) - .withRetention("days(100000)", ActionType.DELETE) - .withValidity(startTime, "2099-01-01T00:00Z") - .withClusterType(ClusterType.SOURCE) - .withDataLocation(feedInputPath) - .build()).toString(); - - //submit clusters - Bundle.submitCluster(bundles[0], bundles[1], bundles[2]); - - //create test data on cluster_2 - /*InstanceUtil.createDataWithinDatesAndPrefix(cluster2, - InstanceUtil.oozieDateToDate(startTime), - InstanceUtil.oozieDateToDate(InstanceUtil.getTimeWrtSystemTime(60)), - feedInputPath, 1);*/ - - //submit and schedule feed - prism.getFeedHelper().submitAndSchedule(feed); - - InstancesSummaryResult r = prism.getFeedHelper() - .getInstanceSummary(Util.readEntityName(feed), "?start=" + startTime); - - r = prism.getFeedHelper().getInstanceSummary(Util.readEntityName(feed), - "?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(endTime, -20)); - } - - @AfterMethod(alwaysRun = true) - public void tearDown() throws 
IOException { - removeTestClassEntities(); - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/8e49379d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/LogMoverTest.java ---------------------------------------------------------------------- diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/LogMoverTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/LogMoverTest.java deleted file mode 100644 index f936305..0000000 --- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/LogMoverTest.java +++ /dev/null @@ -1,146 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.regression; - -import org.apache.falcon.entity.v0.EntityType; -import org.apache.falcon.entity.v0.Frequency.TimeUnit; -import org.apache.falcon.regression.Entities.ProcessMerlin; -import org.apache.falcon.regression.core.bundle.Bundle; -import org.apache.falcon.regression.core.helpers.ColoHelper; -import org.apache.falcon.regression.core.util.AssertUtil; -import org.apache.falcon.regression.core.util.BundleUtil; -import org.apache.falcon.regression.core.util.HadoopUtil; -import org.apache.falcon.regression.core.util.InstanceUtil; -import org.apache.falcon.regression.core.util.OSUtil; -import org.apache.falcon.regression.core.util.OozieUtil; -import org.apache.falcon.regression.core.util.TimeUtil; -import org.apache.falcon.regression.core.util.Util; -import org.apache.falcon.regression.testHelper.BaseTestClass; -import org.apache.hadoop.fs.FileSystem; -import org.apache.log4j.Logger; -import org.apache.oozie.client.CoordinatorAction; -import org.apache.oozie.client.Job; -import org.apache.oozie.client.OozieClient; -import org.testng.annotations.AfterMethod; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import java.util.List; - -/** - * LogMover Test. - * Adds job launcher success/failure logs to falcon staging directory. - * It is not working for map-reduce actions(FALCON-1038). - * Using pig-action to test this feature. 
- */ -@Test(groups = "embedded") -public class LogMoverTest extends BaseTestClass { - - private ColoHelper cluster = servers.get(0); - private FileSystem clusterFS = serverFS.get(0); - private OozieClient clusterOC = serverOC.get(0); - private String pigTestDir = cleanAndGetTestDir(); - private String aggregateWorkflowDir = cleanAndGetTestDir() + "/aggregator"; - private String inputPath = pigTestDir + "/input" + MINUTE_DATE_PATTERN; - private String propPath = pigTestDir + "/LogMover"; - private static final Logger LOGGER = Logger.getLogger(LogMoverTest.class); - private String processName; - private String process; - private String startDate; - private String endDate; - - @BeforeMethod(alwaysRun = true) - public void setUp() throws Exception { - LOGGER.info("in @BeforeMethod"); - startDate = TimeUtil.getTimeWrtSystemTime(-3); - endDate = TimeUtil.getTimeWrtSystemTime(3); - - LOGGER.info("startDate : " + startDate + " , endDate : " + endDate); - //copy pig script and workflow - HadoopUtil.uploadDir(clusterFS, aggregateWorkflowDir, OSUtil.concat(OSUtil.RESOURCES, "LogMover")); - Bundle bundle = BundleUtil.readELBundle(); - bundles[0] = new Bundle(bundle, cluster); - bundles[0].generateUniqueBundle(this); - bundles[0].setInputFeedDataPath(inputPath); - bundles[0].setInputFeedPeriodicity(1, TimeUnit.minutes); - bundles[0].setOutputFeedLocationData(pigTestDir + "/output-data" + MINUTE_DATE_PATTERN); - bundles[0].setOutputFeedAvailabilityFlag("_SUCCESS"); - bundles[0].setProcessWorkflow(aggregateWorkflowDir); - bundles[0].setProcessInputNames("INPUT"); - bundles[0].setProcessOutputNames("OUTPUT"); - bundles[0].setProcessValidity(startDate, endDate); - bundles[0].setProcessPeriodicity(1, TimeUnit.minutes); - bundles[0].setOutputFeedPeriodicity(1, TimeUnit.minutes); - - List<String> dataDates = TimeUtil.getMinuteDatesOnEitherSide(startDate, endDate, 20); - HadoopUtil.flattenAndPutDataInFolder(clusterFS, OSUtil.NORMAL_INPUT, - bundles[0].getFeedDataPathPrefix(), dataDates); - - // Defining path to be used in pig script - final ProcessMerlin processElement = bundles[0].getProcessObject(); - processElement.clearProperties().withProperty("inputPath", propPath); - bundles[0].setProcessData(processElement.toString()); - process = bundles[0].getProcessData(); - processName = Util.readEntityName(process); - - } - - @AfterMethod(alwaysRun = true) - public void tearDown() { - removeTestClassEntities(); - } - - /** - *Schedule a process. It should succeed and job launcher success information - * should be present in falcon staging directory. - */ - @Test(groups = {"singleCluster"}) - public void logMoverSucceedTest() throws Exception { - bundles[0].submitFeedsScheduleProcess(prism); - AssertUtil.checkStatus(clusterOC, EntityType.PROCESS, process, Job.Status.RUNNING); - - //Copy data to let pig job succeed - HadoopUtil.copyDataToFolder(clusterFS, propPath, OSUtil.concat(OSUtil.RESOURCES, "pig")); - - InstanceUtil.waitTillInstancesAreCreated(clusterOC, bundles[0].getProcessData(), 0); - OozieUtil.createMissingDependencies(cluster, EntityType.PROCESS, processName, 0); - InstanceUtil.waitTillInstanceReachState(clusterOC, bundles[0].getProcessName(), 1, - CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS); - - AssertUtil.assertLogMoverPath(true, processName, clusterFS, "process", "Success logs are not present"); - } - - /** - *Schedule a process. It should fail and job launcher failure information - * should be present in falcon staging directory. 
- */ - @Test(groups = {"singleCluster"}) - public void logMoverFailTest() throws Exception { - bundles[0].submitFeedsScheduleProcess(prism); - AssertUtil.checkStatus(clusterOC, EntityType.PROCESS, process, Job.Status.RUNNING); - - InstanceUtil.waitTillInstancesAreCreated(clusterOC, bundles[0].getProcessData(), 0); - OozieUtil.createMissingDependencies(cluster, EntityType.PROCESS, processName, 0); - InstanceUtil.waitTillInstanceReachState(clusterOC, bundles[0].getProcessName(), 1, - CoordinatorAction.Status.KILLED, EntityType.PROCESS); - - AssertUtil.assertLogMoverPath(false, processName, clusterFS, "process", "Failed logs are not present"); - } - -}