http://git-wip-us.apache.org/repos/asf/oozie/blob/ba68347b/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java
b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index ca3f883..4442513 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -21,22 +21,14 @@ package org.apache.oozie.test;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
-import java.io.FileReader;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.IOException;
-import java.net.InetAddress;
import java.net.URL;
import java.util.*;
-import java.net.UnknownHostException;
-import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
-import javax.persistence.EntityManager;
-import javax.persistence.FlushModeType;
-import javax.persistence.Query;
-
import junit.framework.TestCase;
import org.apache.commons.io.FilenameUtils;
@@ -51,39 +43,20 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.hadoop.util.Shell;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.MiniYARNCluster;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.spi.LoggingEvent;
-import org.apache.oozie.BundleActionBean;
-import org.apache.oozie.BundleJobBean;
-import org.apache.oozie.CoordinatorActionBean;
-import org.apache.oozie.CoordinatorJobBean;
-import org.apache.oozie.SLAEventBean;
-import org.apache.oozie.WorkflowActionBean;
-import org.apache.oozie.WorkflowJobBean;
-import org.apache.oozie.dependency.FSURIHandler;
-import org.apache.oozie.dependency.HCatURIHandler;
import org.apache.oozie.service.ConfigurationService;
-import org.apache.oozie.service.HCatAccessorService;
import org.apache.oozie.service.HadoopAccessorException;
import org.apache.oozie.service.HadoopAccessorService;
-import org.apache.oozie.service.JMSAccessorService;
import org.apache.oozie.service.JPAService;
-import org.apache.oozie.service.PartitionDependencyManagerService;
import org.apache.oozie.service.ServiceException;
import org.apache.oozie.service.Services;
-import org.apache.oozie.service.StoreService;
-import org.apache.oozie.service.URIHandlerService;
-import org.apache.oozie.sla.SLARegistrationBean;
-import org.apache.oozie.sla.SLASummaryBean;
-import org.apache.oozie.store.StoreException;
import org.apache.oozie.test.MiniHCatServer.RUNMODE;
import org.apache.oozie.test.hive.MiniHS2;
import org.apache.oozie.util.ClasspathUtils;
@@ -109,169 +82,24 @@ import org.apache.oozie.util.XLog;
* From within testcases, system properties must be changed using the {@link
#setSystemProperty} method.
*/
public abstract class XTestCase extends TestCase {
- private static EnumSet<YarnApplicationState> YARN_TERMINAL_STATES =
EnumSet.of(YarnApplicationState.FAILED, YarnApplicationState.KILLED,
YarnApplicationState.FINISHED);
- private Map<String, String> sysProps;
+ private static EnumSet<YarnApplicationState> YARN_TERMINAL_STATES =
+ EnumSet.of(YarnApplicationState.FAILED,
YarnApplicationState.KILLED, YarnApplicationState.FINISHED);
+ protected static final File OOZIE_SRC_DIR = new
TestPropertiesLoader().loadTestPropertiesOrThrow();
+ private final TestCaseDirectories testCaseDirectories = new
TestCaseDirectories();
+ private final TestSystemProperties testSystemProperties = new
TestSystemProperties();
+
+ private final TestConfigurations testConfigurations = new
TestConfigurations();
private String testCaseDir;
+
private String testCaseConfDir;
private String hadoopVersion;
protected XLog log = new XLog(LogFactory.getLog(getClass()));
- protected static File OOZIE_SRC_DIR = null;
- private static final String OOZIE_TEST_PROPERTIES =
"oozie.test.properties";
+ static final String OOZIE_TEST_PROPERTIES = "oozie.test.properties";
protected static final String SYSTEM_LINE_SEPARATOR =
System.getProperty("line.separator");
-
- public static float WAITFOR_RATIO =
Float.parseFloat(System.getProperty("oozie.test.waitfor.ratio", "1"));
- protected static final String localActiveMQBroker =
"vm://localhost?broker.persistent=false";
- protected static final String ActiveMQConnFactory =
"org.apache.activemq.jndi.ActiveMQInitialContextFactory";
-
- static {
- try {
- OOZIE_SRC_DIR = new File("core").getAbsoluteFile();
- if (!OOZIE_SRC_DIR.exists()) {
- OOZIE_SRC_DIR = OOZIE_SRC_DIR.getParentFile().getParentFile();
- OOZIE_SRC_DIR = new File(OOZIE_SRC_DIR, "core");
- }
- if (!OOZIE_SRC_DIR.exists()) {
- OOZIE_SRC_DIR = OOZIE_SRC_DIR.getParentFile().getParentFile();
- OOZIE_SRC_DIR = new File(OOZIE_SRC_DIR, "core");
- }
- if (!OOZIE_SRC_DIR.exists()) {
- // We're probably being run from outside of Oozie (e.g.
MiniOozie), so just use a dummy location here.
- // Anything that uses this location should have a fallback
anyway.
- OOZIE_SRC_DIR = new File(".");
- } else {
- OOZIE_SRC_DIR = OOZIE_SRC_DIR.getParentFile();
- }
-
- final String testPropsFile =
System.getProperty(OOZIE_TEST_PROPERTIES, "test.properties");
- final File file = new File(testPropsFile).isAbsolute()
- ? new File(testPropsFile) : new File(OOZIE_SRC_DIR,
testPropsFile);
- if (file.exists()) {
- System.out.println();
-
System.out.println("*********************************************************************************");
- System.out.println("Loading test system properties from: " +
file.getAbsolutePath());
- System.out.println();
- final Properties props = new Properties();
- props.load(new FileReader(file));
- for (final Map.Entry entry : props.entrySet()) {
- if (!System.getProperties().containsKey(entry.getKey())) {
- System.setProperty((String) entry.getKey(), (String)
entry.getValue());
- System.out.println(entry.getKey() + " = " +
entry.getValue());
- }
- else {
- System.out.println(entry.getKey() + " IGNORED, using
command line value = " +
- System.getProperty((String) entry.getKey()));
- }
- }
-
System.out.println("*********************************************************************************");
- System.out.println();
- }
- else {
- if (System.getProperty(OOZIE_TEST_PROPERTIES) != null) {
- System.err.println();
- System.err.println("ERROR: Specified test file does not
exist: " +
- System.getProperty(OOZIE_TEST_PROPERTIES));
- System.err.println();
- System.exit(-1);
- }
- }
- } catch (final IOException ex) {
- throw new RuntimeException(ex);
- }
-
- }
-
- /**
- * System property to specify the parent directory for the 'oozietests'
directory to be used as base for all test
- * working directories. </p> If this property is not set, the assumed
value is '/tmp'.
- */
- public static final String OOZIE_TEST_DIR = "oozie.test.dir";
-
- /**
- * System property to specify the Hadoop Job Tracker to use for testing.
</p> If this property is not set, the
- * assumed value is 'locahost:9001'.
- */
- public static final String OOZIE_TEST_JOB_TRACKER =
"oozie.test.job.tracker";
-
- /**
- * System property to specify the Hadoop Name Node to use for testing.
</p> If this property is not set, the assumed
- * value is 'locahost:9000'.
- */
- public static final String OOZIE_TEST_NAME_NODE = "oozie.test.name.node";
-
- /**
- * System property to specify the second Hadoop Name Node to use for
testing. </p> If this property is not set, the assumed
- * value is 'locahost:9100'.
- */
- public static final String OOZIE_TEST_NAME_NODE2 = "oozie.test.name.node2";
-
- /**
- * System property to specify the Hadoop Version to use for testing. </p>
If this property is not set, the assumed
- * value is "0.20.0"
- */
- public static final String HADOOP_VERSION = "hadoop.version";
-
- /**
- * System property that specifies the user that test oozie instance runs
as.
- * The value of this property defaults to the "${user.name} system
property.
- */
- public static final String TEST_OOZIE_USER_PROP = "oozie.test.user.oozie";
-
- /**
- * System property that specifies the default test user name used by
- * the tests. The defalt value of this property is <tt>test</tt>.
- */
- public static final String TEST_USER1_PROP = "oozie.test.user.test";
-
- /**
- * System property that specifies an auxilliary test user name used by the
- * tests. The default value of this property is <tt>test2</tt>.
- */
- public static final String TEST_USER2_PROP = "oozie.test.user.test2";
-
- /**
- * System property that specifies another auxilliary test user name used by
- * the tests. The default value of this property is <tt>test3</tt>.
- */
- public static final String TEST_USER3_PROP = "oozie.test.user.test3";
-
- /**
- * System property that specifies the test groiup used by the tests.
- * The default value of this property is <tt>testg</tt>.
- */
- public static final String TEST_GROUP_PROP = "oozie.test.group";
-
- /**
- * System property that specifies the test groiup used by the tests.
- * The default value of this property is <tt>testg</tt>.
- */
- public static final String TEST_GROUP_PROP2 = "oozie.test.group2";
-
- /**
- * System property that specifies the wait time, in seconds, between
testcases before
- * triggering a shutdown. The default value is 10 sec.
- */
- public static final String TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT =
"oozie.test.minicluster.monitor.shutdown.wait";
-
- /**
- * Name of the shell command
- */
- protected static final String SHELL_COMMAND_NAME = (Shell.WINDOWS) ? "cmd"
: "bash";
-
- /**
- * Extension for shell script files
- */
- protected static final String SHELL_COMMAND_SCRIPTFILE_EXTENSION =
(Shell.WINDOWS) ? "cmd" : "sh";
-
- /**
- * Option for shell command to pass script files
- */
- protected static final String SHELL_COMMAND_SCRIPTFILE_OPTION =
(Shell.WINDOWS) ? "/c" : "-c";
-
- /**
- * Minimal set of require Services for cleaning up the database ({@link
JPAService} and {@link StoreService})
- */
- private static final String MINIMAL_SERVICES_FOR_DB_CLEANUP =
JPAService.class.getName() + "," + StoreService.class.getName();
+ protected static float WAITFOR_RATIO =
Float.parseFloat(System.getProperty("oozie.test.waitfor.ratio", "1"));
+ protected static final String LOCAL_ACTIVE_MQ_BROKER =
"vm://localhost?broker.persistent=false";
+ protected static final String ACTIVE_MQ_CONN_FACTORY =
"org.apache.activemq.jndi.ActiveMQInitialContextFactory";
/**
* Initialize the test working directory. <p/> If it does not exist it
creates it, if it already exists it deletes
@@ -295,37 +123,52 @@ public abstract class XTestCase extends TestCase {
protected void setUp(final boolean cleanUpDBTables) throws Exception {
RUNNING_TESTCASES.incrementAndGet();
super.setUp();
- final String baseDir = System.getProperty(OOZIE_TEST_DIR, new
File("target/test-data").getAbsolutePath());
- String msg = null;
- final File f = new File(baseDir);
- if (!f.isAbsolute()) {
- msg = XLog.format("System property [{0}]=[{1}] must be set to an
absolute path", OOZIE_TEST_DIR, baseDir);
+
+ testCaseDirectories.createTestDirOrError();
+
+ hadoopVersion = System.getProperty(TestConstants.HADOOP_VERSION,
"0.20.0");
+ testCaseDir = testCaseDirectories.createTestCaseDir(this, true);
+
+ setupOozieHome();
+
+ testCaseConfDir = createTestCaseSubDir("conf");
+
+ final InputStream oozieSiteSourceStream = loadTestOozieSiteOrError();
+
+ setupOozieSiteConfiguration(oozieSiteSourceStream);
+
+ final File hadoopConfDir = copyAndGetHadoopConfig();
+
+ testSystemProperties.setupSystemProperties(testCaseDir);
+ if (testSystemProperties.isEmbeddedHadoop()) {
+ setUpEmbeddedHadoop(testCaseDir);
}
- else {
- if (baseDir.length() < 4) {
- msg = XLog.format("System property [{0}]=[{1}] path must be at
least 4 chars", OOZIE_TEST_DIR, baseDir);
+ if (testSystemProperties.isEmbeddedHadoop2()) {
+ setUpEmbeddedHadoop2();
+ }
+
+ if (yarnCluster != null) {
+ try (final OutputStream os = new FileOutputStream(new
File(hadoopConfDir, "core-site.xml"))) {
+ final Configuration conf =
testConfigurations.createJobConfFromYarnCluster(yarnCluster.getConfig());
+ conf.writeXml(os);
}
}
- if (msg != null) {
- System.err.println();
- System.err.println(msg);
- System.exit(-1);
+
+ if (System.getProperty("oozie.test.metastore.server",
"false").equals("true")) {
+ setupHCatalogServer();
}
- f.mkdirs();
- if (!f.exists() || !f.isDirectory()) {
- System.err.println();
- System.err.println(XLog.format("Could not create test dir [{0}]",
baseDir));
- System.exit(-1);
+
+ if (System.getProperty("oozie.test.hive.server.2",
"false").equals("true")) {
+ setupHiveServer2();
}
- hadoopVersion = System.getProperty(HADOOP_VERSION, "0.20.0");
- testCaseDir = createTestCaseDir(this, true);
- //setting up Oozie HOME and Oozie conf directory
- setSystemProperty(Services.OOZIE_HOME_DIR, testCaseDir);
- Services.setOozieHome();
- testCaseConfDir = createTestCaseSubDir("conf");
+ // Cleanup any leftover database data to make sure we start each test
with an empty database
+ if (cleanUpDBTables) {
+ cleanUpDBTables();
+ }
+ }
- // load test Oozie site
+ private InputStream loadTestOozieSiteOrError() throws IOException {
final String oozieTestDB = System.getProperty("oozie.test.db",
"hsqldb");
final String defaultOozieSize =
new File(OOZIE_SRC_DIR, "core/src/test/resources/" +
oozieTestDB + "-oozie-site.xml").getAbsolutePath();
@@ -354,6 +197,15 @@ public abstract class XTestCase extends TestCase {
System.exit(-1);
}
}
+ return oozieSiteSourceStream;
+ }
+
+ private void setupOozieHome() throws ServiceException {
+ setSystemProperty(Services.OOZIE_HOME_DIR, testCaseDir);
+ Services.setOozieHome();
+ }
+
+ private void setupOozieSiteConfiguration(final InputStream
oozieSiteSourceStream) throws IOException {
// Copy the specified oozie-site file from oozieSiteSourceStream to
the test case dir as oozie-site.xml
final Configuration oozieSiteConf = new Configuration(false);
oozieSiteConf.addResource(oozieSiteSourceStream);
@@ -366,14 +218,16 @@ public abstract class XTestCase extends TestCase {
oozieSiteConf.set(Services.CONF_SERVICE_CLASSES,
classes.replaceAll("org.apache.oozie.service.ShareLibService,", ""));
// Make sure to create the Oozie DB during unit tests
oozieSiteConf.set(JPAService.CONF_CREATE_DB_SCHEMA, "true");
- File target = new File(testCaseConfDir, "oozie-site.xml");
+ final File target = new File(testCaseConfDir, "oozie-site.xml");
oozieSiteConf.writeXml(new FileOutputStream(target));
+ }
+ private File copyAndGetHadoopConfig() throws IOException {
final File hadoopConfDir = new File(testCaseConfDir, "hadoop-conf");
hadoopConfDir.mkdir();
final File actionConfDir = new File(testCaseConfDir, "action-conf");
actionConfDir.mkdir();
- source = new File(OOZIE_SRC_DIR,
"core/src/test/resources/hadoop-config.xml");
+ final File source = new File(OOZIE_SRC_DIR,
"core/src/test/resources/hadoop-config.xml");
InputStream hadoopConfigResourceStream = null;
if (!source.exists()) {
// If we can't find it, try using the class loader (useful if
we're using XTestCase from outside core)
@@ -392,49 +246,9 @@ public abstract class XTestCase extends TestCase {
} else {
hadoopConfigResourceStream = new FileInputStream(source);
}
- target = new File(hadoopConfDir, "hadoop-site.xml");
+ final File target = new File(hadoopConfDir, "hadoop-site.xml");
IOUtils.copyStream(hadoopConfigResourceStream, new
FileOutputStream(target));
-
- if (System.getProperty("oozielocal.log") == null) {
- setSystemProperty("oozielocal.log", "/tmp/oozielocal.log");
- }
- if (System.getProperty("oozie.test.hadoop.security",
"simple").equals("kerberos")) {
-
System.setProperty("oozie.service.HadoopAccessorService.kerberos.enabled",
"true");
- }
- if (System.getProperty("oozie.test.hadoop.minicluster",
"true").equals("true")) {
- setUpEmbeddedHadoop(getTestCaseDir());
- // Second cluster is not necessary without the first one
- if (System.getProperty("oozie.test.hadoop.minicluster2",
"false").equals("true")) {
- setUpEmbeddedHadoop2();
- }
- }
-
- if (System.getProperty("oozie.test.db.host") == null) {
- System.setProperty("oozie.test.db.host", "localhost");
- }
- setSystemProperty(ConfigurationService.OOZIE_DATA_DIR, testCaseDir);
-
- setSystemProperty(HadoopAccessorService.SUPPORTED_FILESYSTEMS, "*");
-
- if (yarnCluster != null) {
- try (final OutputStream os = new FileOutputStream(new
File(hadoopConfDir, "core-site.xml"))) {
- final Configuration conf = createJobConfFromYarnCluster();
- conf.writeXml(os);
- }
- }
-
- if (System.getProperty("oozie.test.metastore.server",
"false").equals("true")) {
- setupHCatalogServer();
- }
-
- if (System.getProperty("oozie.test.hive.server.2",
"false").equals("true")) {
- setupHiveServer2();
- }
-
- // Cleanup any leftover database data to make sure we start each test
with an empty database
- if (cleanUpDBTables) {
- cleanUpDBTables();
- }
+ return hadoopConfDir;
}
/**
@@ -444,8 +258,7 @@ public abstract class XTestCase extends TestCase {
protected void tearDown() throws Exception {
tearDownHiveServer2();
tearDownHCatalogServer();
- resetSystemProperties();
- sysProps = null;
+ testSystemProperties.resetSystemProperties();
testCaseDir = null;
super.tearDown();
RUNNING_TESTCASES.decrementAndGet();
@@ -499,7 +312,7 @@ public abstract class XTestCase extends TestCase {
* @return Oozie's user Id for running the test cases.
*/
public static String getOozieUser() {
- return System.getProperty(TEST_OOZIE_USER_PROP,
System.getProperty("user.name"));
+ return System.getProperty(TestConstants.TEST_OOZIE_USER_PROP,
System.getProperty("user.name"));
}
/**
@@ -508,7 +321,7 @@ public abstract class XTestCase extends TestCase {
* @return the user Id.
*/
protected static String getTestUser() {
- return System.getProperty(TEST_USER1_PROP, "test");
+ return System.getProperty(TestConstants.TEST_USER1_PROP, "test");
}
/**
@@ -518,7 +331,7 @@ public abstract class XTestCase extends TestCase {
* @return the user Id.
*/
protected static String getTestUser2() {
- return System.getProperty(TEST_USER2_PROP, "test2");
+ return System.getProperty(TestConstants.TEST_USER2_PROP, "test2");
}
/**
@@ -528,7 +341,7 @@ public abstract class XTestCase extends TestCase {
* @return the user Id.
*/
protected static String getTestUser3() {
- return System.getProperty(TEST_USER3_PROP, "test3");
+ return System.getProperty(TestConstants.TEST_USER3_PROP, "test3");
}
/**
@@ -537,7 +350,7 @@ public abstract class XTestCase extends TestCase {
* @return the test group.
*/
protected static String getTestGroup() {
- return System.getProperty(TEST_GROUP_PROP, "testg");
+ return System.getProperty(TestConstants.TEST_GROUP_PROP, "testg");
}
/**
@@ -546,71 +359,11 @@ public abstract class XTestCase extends TestCase {
* @return the test group.
*/
protected static String getTestGroup2() {
- return System.getProperty(TEST_GROUP_PROP, "testg2");
- }
-
- /**
- * Return the test working directory.
- * <p/>
- * It returns
<code>${oozie.test.dir}/oozietests/TESTCLASSNAME/TESTMETHODNAME</code>.
- *
- * @param testCase testcase instance to obtain the working directory.
- * @return the test working directory.
- */
- private String getTestCaseDirInternal(final TestCase testCase) {
- ParamChecker.notNull(testCase, "testCase");
- File dir = new File(System.getProperty(OOZIE_TEST_DIR,
"target/test-data"));
- dir = new File(dir, "oozietests").getAbsoluteFile();
- dir = new File(dir, testCase.getClass().getName());
- dir = new File(dir, testCase.getName());
- return dir.getAbsolutePath();
+ return System.getProperty(TestConstants.TEST_GROUP_PROP, "testg2");
}
protected void delete(final File file) throws IOException {
- ParamChecker.notNull(file, "file");
- if (file.getAbsolutePath().length() < 5) {
- throw new RuntimeException(XLog.format("path [{0}] is too short,
not deleting", file.getAbsolutePath()));
- }
- if (file.exists()) {
- if (file.isDirectory()) {
- final File[] children = file.listFiles();
- if (children != null) {
- for (final File child : children) {
- delete(child);
- }
- }
- }
- if (!file.delete()) {
- throw new RuntimeException(XLog.format("could not delete path
[{0}]", file.getAbsolutePath()));
- }
- }
- else {
- // With a dangling symlink, exists() doesn't return true so try to
delete it anyway; we fail silently in case the file
- // truely doesn't exist
- file.delete();
- }
- }
-
- /**
- * Create the test working directory.
- *
- * @param testCase testcase instance to obtain the working directory.
- * @param cleanup indicates if the directory should be cleaned up if it
exists.
- * @return return the path of the test working directory, it is always an
absolute path.
- * @throws Exception if the test working directory could not be created or
cleaned up.
- */
- private String createTestCaseDir(final TestCase testCase, final boolean
cleanup) throws Exception {
- final String testCaseDir = getTestCaseDirInternal(testCase);
- System.out.println();
- System.out.println(XLog.format("Setting testcase work dir[{0}]",
testCaseDir));
- if (cleanup) {
- delete(new File(testCaseDir));
- }
- final File dir = new File(testCaseDir);
- if (!dir.mkdirs()) {
- throw new RuntimeException(XLog.format("Could not create testcase
dir[{0}]", testCaseDir));
- }
- return testCaseDir;
+ testCaseDirectories.delete(file);
}
/**
@@ -620,23 +373,8 @@ public abstract class XTestCase extends TestCase {
* @return the absolute path to the created directory.
*/
protected String createTestCaseSubDir(final String... subDirNames) {
- ParamChecker.notNull(subDirNames, "subDirName");
- if (subDirNames.length == 0) {
- throw new RuntimeException(XLog.format("Could not create testcase
subdir ''; it already exists"));
- }
-
- File dir = new File(testCaseDir);
- for (int i = 0; i < subDirNames.length; i++) {
- ParamChecker.notNull(subDirNames[i], "subDirName[" + i + "]");
- dir = new File(dir, subDirNames[i]);
- }
-
- if (!dir.mkdirs()) {
- throw new RuntimeException(XLog.format("Could not create testcase
subdir[{0}]", dir));
- }
- return dir.getAbsolutePath();
+ return testCaseDirectories.createTestCaseSubdir(testCaseDir,
subDirNames);
}
-
/**
* Set a system property for the duration of the method test case.
* <p/>
@@ -646,42 +384,13 @@ public abstract class XTestCase extends TestCase {
* @param value value to set.
*/
protected void setSystemProperty(final String name, final String value) {
- if (sysProps == null) {
- sysProps = new HashMap<String, String>();
- }
- if (!sysProps.containsKey(name)) {
- final String currentValue = System.getProperty(name);
- sysProps.put(name, currentValue);
- }
- if (value != null) {
- System.setProperty(name, value);
- }
- else {
- System.getProperties().remove(name);
- }
- }
-
- /**
- * Reset changed system properties to their original values. <p/> Called
from {@link #tearDown}.
- */
- private void resetSystemProperties() {
- if (sysProps != null) {
- for (final Map.Entry<String, String> entry : sysProps.entrySet()) {
- if (entry.getValue() != null) {
- System.setProperty(entry.getKey(), entry.getValue());
- }
- else {
- System.getProperties().remove(entry.getKey());
- }
- }
- sysProps.clear();
- }
+ testSystemProperties.setSystemProperty(name, value);
}
/**
* A predicate 'closure' used by {@link XTestCase#waitFor} method.
*/
- public static interface Predicate {
+ public interface Predicate {
/**
* Perform a predicate evaluation.
@@ -689,7 +398,7 @@ public abstract class XTestCase extends TestCase {
* @return the boolean result of the evaluation.
* @throws Exception thrown if the predicate evaluation could not
evaluate.
*/
- public boolean evaluate() throws Exception;
+ boolean evaluate() throws Exception;
}
/**
@@ -741,45 +450,45 @@ public abstract class XTestCase extends TestCase {
}
/**
- * Return the Hadoop Job Tracker to use for testing. </p> The value is
taken from the Java sytem property {@link
- * #OOZIE_TEST_JOB_TRACKER}, if this property is not set, the assumed
value is 'locahost:9001'.
+ * Return the Hadoop Job Tracker to use for testing. </p> The value is
taken from the Java system property
+ * {@link TestConstants#OOZIE_TEST_JOB_TRACKER}, if this property is not
set, the assumed value is 'localhost:9001'.
*
* @return the job tracker URI.
*/
protected String getJobTrackerUri() {
- return System.getProperty(OOZIE_TEST_JOB_TRACKER, "localhost:9001");
+ return System.getProperty(TestConstants.OOZIE_TEST_JOB_TRACKER,
"localhost:9001");
}
/**
- * Return the Hadoop Name Node to use for testing. </p> The value is taken
from the Java sytem property {@link
- * #OOZIE_TEST_NAME_NODE}, if this property is not set, the assumed value
is 'locahost:9000'.
+ * Return the Hadoop Name Node to use for testing. </p> The value is taken
from the Java system property
+ * {@link TestConstants#OOZIE_TEST_NAME_NODE}, if this property is not
set, the assumed value is 'localhost:9000'.
*
* @return the name node URI.
*/
protected String getNameNodeUri() {
- return System.getProperty(OOZIE_TEST_NAME_NODE,
"hdfs://localhost:9000");
+ return System.getProperty(TestConstants.OOZIE_TEST_NAME_NODE,
"hdfs://localhost:9000");
}
/**
- * Return the second Hadoop Name Node to use for testing. </p> The value
is taken from the Java sytem property {@link
- * #OOZIE_TEST_NAME_NODE2}, if this property is not set, the assumed value
is 'locahost:9100'.
+ * Return the second Hadoop Name Node to use for testing. </p> The value
is taken from the Java system property
+ * {@link TestConstants#OOZIE_TEST_NAME_NODE2}, if this property is not
set, the assumed value is 'localhost:9100'.
*
* @return the second name node URI.
*/
protected String getNameNode2Uri() {
- return System.getProperty(OOZIE_TEST_NAME_NODE2,
"hdfs://localhost:9100");
+ return System.getProperty(TestConstants.OOZIE_TEST_NAME_NODE2,
"hdfs://localhost:9100");
}
- public String getKeytabFile() {
+ protected String getKeytabFile() {
final String defaultFile = new File(System.getProperty("user.home"),
"oozie.keytab").getAbsolutePath();
return System.getProperty("oozie.test.kerberos.keytab.file",
defaultFile);
}
- public String getRealm() {
+ String getRealm() {
return System.getProperty("oozie.test.kerberos.realm", "LOCALHOST");
}
- public String getOoziePrincipal() {
+ protected String getOoziePrincipal() {
return System.getProperty("oozie.test.kerberos.oozie.principal",
getOozieUser() + "/localhost") + "@" + getRealm();
}
@@ -795,108 +504,7 @@ public abstract class XTestCase extends TestCase {
* @throws Exception
*/
protected final void cleanUpDBTables() throws Exception {
- // If the Services are already loaded, then a test is likely calling
this for something specific and we shouldn't mess with
- // the Services; so just cleanup the database
- if (Services.get() != null) {
- cleanUpDBTablesInternal();
- }
- else {
- // Otherwise, this is probably being called during setup() and we
should just load the minimal set of required Services
- // needed to cleanup the database and shut them down when done;
the test will likely start its own Services later and
- // we don't want to interfere
- try {
- final Services services = new Services();
- services.getConf().set(Services.CONF_SERVICE_CLASSES,
MINIMAL_SERVICES_FOR_DB_CLEANUP);
- services.init();
- cleanUpDBTablesInternal();
- } finally {
- if (Services.get() != null) {
- Services.get().destroy();
- }
- }
- }
- }
-
- private void cleanUpDBTablesInternal() throws StoreException {
- final EntityManager entityManager =
Services.get().get(JPAService.class).getEntityManager();
- entityManager.setFlushMode(FlushModeType.COMMIT);
- entityManager.getTransaction().begin();
-
- Query q = entityManager.createNamedQuery("GET_WORKFLOWS");
- final List<WorkflowJobBean> wfjBeans = q.getResultList();
- final int wfjSize = wfjBeans.size();
- for (final WorkflowJobBean w : wfjBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createNamedQuery("GET_ACTIONS");
- final List<WorkflowActionBean> wfaBeans = q.getResultList();
- final int wfaSize = wfaBeans.size();
- for (final WorkflowActionBean w : wfaBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createNamedQuery("GET_COORD_JOBS");
- final List<CoordinatorJobBean> cojBeans = q.getResultList();
- final int cojSize = cojBeans.size();
- for (final CoordinatorJobBean w : cojBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createNamedQuery("GET_COORD_ACTIONS");
- final List<CoordinatorActionBean> coaBeans = q.getResultList();
- final int coaSize = coaBeans.size();
- for (final CoordinatorActionBean w : coaBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createNamedQuery("GET_BUNDLE_JOBS");
- final List<BundleJobBean> bjBeans = q.getResultList();
- final int bjSize = bjBeans.size();
- for (final BundleJobBean w : bjBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createNamedQuery("GET_BUNDLE_ACTIONS");
- final List<BundleActionBean> baBeans = q.getResultList();
- final int baSize = baBeans.size();
- for (final BundleActionBean w : baBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createNamedQuery("GET_SLA_EVENTS");
- final List<SLAEventBean> slaBeans = q.getResultList();
- final int slaSize = slaBeans.size();
- for (final SLAEventBean w : slaBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createQuery("select OBJECT(w) from
SLARegistrationBean w");
- final List<SLARegistrationBean> slaRegBeans = q.getResultList();
- final int slaRegSize = slaRegBeans.size();
- for (final SLARegistrationBean w : slaRegBeans) {
- entityManager.remove(w);
- }
-
- q = entityManager.createQuery("select OBJECT(w) from SLASummaryBean
w");
- final List<SLASummaryBean> sdBeans = q.getResultList();
- final int ssSize = sdBeans.size();
- for (final SLASummaryBean w : sdBeans) {
- entityManager.remove(w);
- }
-
- entityManager.getTransaction().commit();
- entityManager.close();
- log.info(wfjSize + " entries in WF_JOBS removed from DB!");
- log.info(wfaSize + " entries in WF_ACTIONS removed from DB!");
- log.info(cojSize + " entries in COORD_JOBS removed from DB!");
- log.info(coaSize + " entries in COORD_ACTIONS removed from DB!");
- log.info(bjSize + " entries in BUNDLE_JOBS removed from DB!");
- log.info(baSize + " entries in BUNDLE_ACTIONS removed from DB!");
- log.info(slaSize + " entries in SLA_EVENTS removed from DB!");
- log.info(slaRegSize + " entries in SLA_REGISTRATION removed from DB!");
- log.info(ssSize + " entries in SLA_SUMMARY removed from DB!");
-
+ new TestDbCleaner().cleanDbTables();
}
private static MiniDFSCluster dfsCluster = null;
@@ -915,33 +523,24 @@ public abstract class XTestCase extends TestCase {
ClasspathUtils.setUsingMiniYarnCluster(true);
final int dataNodes = 2;
final String oozieUser = getOozieUser();
- final JobConf dfsConfig = createDFSConfig();
- final String[] userGroups = new String[]{getTestGroup(),
getTestGroup2()};
- UserGroupInformation.createUserForTesting(oozieUser, userGroups);
- UserGroupInformation.createUserForTesting(getTestUser(),
userGroups);
- UserGroupInformation.createUserForTesting(getTestUser2(),
userGroups);
- UserGroupInformation.createUserForTesting(getTestUser3(), new
String[]{"users"});
+ final JobConf dfsConfig =
testConfigurations.createDFSConfig(getOozieUser(), getTestGroup());
+
+ setupUgi(oozieUser);
try {
- dfsCluster = new MiniDFSCluster.Builder(dfsConfig)
- .numDataNodes(dataNodes)
- .format(true)
- .racks(null)
- .build();
+ setupDfsCluster(dataNodes, dfsConfig);
createHdfsPathsAndSetupPermissions();
- final Configuration yarnConfig = createYarnConfig(dfsConfig);
- yarnCluster = new MiniYARNCluster(this.getClass().getName(),
1, 1, 1, 1);
- yarnCluster.init(yarnConfig);
- yarnCluster.start();
+ setupYarnCluster(dfsConfig);
+
final JobConf jobConf = new JobConf(yarnCluster.getConfig());
- System.setProperty(OOZIE_TEST_JOB_TRACKER,
jobConf.get("mapred.job.tracker"));
+ System.setProperty(TestConstants.OOZIE_TEST_JOB_TRACKER,
jobConf.get("mapred.job.tracker"));
final String rmAddress =
jobConf.get("yarn.resourcemanager.address");
if (rmAddress != null) {
- System.setProperty(OOZIE_TEST_JOB_TRACKER, rmAddress);
+ System.setProperty(TestConstants.OOZIE_TEST_JOB_TRACKER,
rmAddress);
}
- System.setProperty(OOZIE_TEST_NAME_NODE,
dfsCluster.getFileSystem().getUri().toString());
+ System.setProperty(TestConstants.OOZIE_TEST_NAME_NODE,
dfsCluster.getFileSystem().getUri().toString());
ProxyUsers.refreshSuperUserGroupsConfiguration(dfsConfig);
} catch (final Exception ex) {
shutdownMiniCluster();
@@ -951,6 +550,30 @@ public abstract class XTestCase extends TestCase {
}
}
+ private void setupDfsCluster(final int dataNodes, final JobConf dfsConfig)
throws IOException {
+ dfsCluster = new MiniDFSCluster.Builder(dfsConfig)
+ .numDataNodes(dataNodes)
+ .format(true)
+ .racks(null)
+ .build();
+ }
+
+ private void setupYarnCluster(final JobConf dfsConfig) {
+ final Configuration yarnConfig =
testConfigurations.createYarnConfig(dfsConfig);
+ yarnCluster = new MiniYARNCluster(this.getClass().getName(), 1, 1, 1,
1);
+ yarnCluster.init(yarnConfig);
+ yarnCluster.start();
+ }
+
+ private void setupUgi(final String oozieUser) {
+ final String[] userGroups = new String[]{getTestGroup(),
getTestGroup2()};
+
+ UserGroupInformation.createUserForTesting(oozieUser, userGroups);
+ UserGroupInformation.createUserForTesting(getTestUser(), userGroups);
+ UserGroupInformation.createUserForTesting(getTestUser2(), userGroups);
+ UserGroupInformation.createUserForTesting(getTestUser3(), new
String[]{"users"});
+ }
+
private void createHdfsPathsAndSetupPermissions() throws IOException {
final FileSystem fileSystem = dfsCluster.getFileSystem();
@@ -975,7 +598,7 @@ public abstract class XTestCase extends TestCase {
try {
System.setProperty("test.build.data",
FilenameUtils.concat(testBuildDataSaved, "2"));
// Only DFS cluster is created based upon current need
- dfsCluster2 = new MiniDFSCluster(createDFSConfig(), 2, true,
null);
+ dfsCluster2 = new
MiniDFSCluster(testConfigurations.createDFSConfig(getOozieUser(),
getTestGroup()), 2, true, null);
final FileSystem fileSystem = dfsCluster2.getFileSystem();
fileSystem.mkdirs(new Path("target/test-data"));
fileSystem.mkdirs(new Path("/user"));
@@ -983,7 +606,7 @@ public abstract class XTestCase extends TestCase {
fileSystem.setPermission(new Path("target/test-data"),
FsPermission.valueOf("-rwxrwxrwx"));
fileSystem.setPermission(new Path("/user"),
FsPermission.valueOf("-rwxrwxrwx"));
fileSystem.setPermission(new Path("/tmp"),
FsPermission.valueOf("-rwxrwxrwx"));
- System.setProperty(OOZIE_TEST_NAME_NODE2,
fileSystem.getConf().get("fs.default.name"));
+ System.setProperty(TestConstants.OOZIE_TEST_NAME_NODE2,
fileSystem.getConf().get("fs.default.name"));
} catch (final Exception ex) {
shutdownMiniCluster2();
throw ex;
@@ -994,44 +617,6 @@ public abstract class XTestCase extends TestCase {
}
}
- private JobConf createDFSConfig() throws UnknownHostException {
- final JobConf conf = new JobConf();
- conf.set("dfs.block.access.token.enable", "false");
- conf.set("dfs.permissions", "true");
- conf.set("hadoop.security.authentication", "simple");
-
- //Doing this because Hadoop 1.x does not support '*' if the value is
'*,127.0.0.1'
- final StringBuilder sb = new StringBuilder();
- sb.append("127.0.0.1,localhost");
- for (final InetAddress i :
InetAddress.getAllByName(InetAddress.getLocalHost().getHostName())) {
- sb.append(",").append(i.getCanonicalHostName());
- }
- conf.set("hadoop.proxyuser." + getOozieUser() + ".hosts",
sb.toString());
-
- conf.set("hadoop.proxyuser." + getOozieUser() + ".groups",
getTestGroup());
- conf.set("mapred.tasktracker.map.tasks.maximum", "4");
- conf.set("mapred.tasktracker.reduce.tasks.maximum", "4");
-
- conf.set("hadoop.tmp.dir", "target/test-data" + "/minicluster");
-
- // Scheduler properties required for YARN CapacityScheduler to work
- conf.set("yarn.scheduler.capacity.root.queues", "default");
- conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
- // Required to prevent deadlocks with YARN CapacityScheduler
- conf.set("yarn.scheduler.capacity.maximum-am-resource-percent", "0.5");
-
- return conf;
- }
-
- private Configuration createYarnConfig(final Configuration parentConfig) {
- final Configuration yarnConfig = new YarnConfiguration(parentConfig);
-
- yarnConfig.setBoolean(YarnConfiguration.AUTO_FAILOVER_ENABLED, false);
- yarnConfig.set(YarnConfiguration.RM_WEBAPP_ADDRESS, "localhost:0");
-
- return yarnConfig;
- }
-
protected void setupHCatalogServer() throws Exception {
if (hcatServer == null) {
hcatServer = new MiniHCatServer(RUNMODE.SERVER, createJobConf());
@@ -1040,13 +625,8 @@ public abstract class XTestCase extends TestCase {
}
}
- protected void tearDownHCatalogServer() throws Exception {
+ private void tearDownHCatalogServer() throws Exception {
// TODO: This doesn't properly shutdown the metastore. For now, just
keep the current one running once it's been started
-// if (hcatServer != null) {
-// hcatServer.shutdown();
-// hcatServer = null;
-// log.info("Metastore server shutdown");
-// }
}
protected void setupHiveServer2() throws Exception {
@@ -1067,7 +647,7 @@ public abstract class XTestCase extends TestCase {
}
}
- protected void tearDownHiveServer2() {
+ private void tearDownHiveServer2() {
if (hiveserver2 != null && hiveserver2.isStarted()) {
hiveserver2.stop();
hiveserver2 = null;
@@ -1117,12 +697,12 @@ public abstract class XTestCase extends TestCase {
private static class MiniClusterShutdownMonitor extends Thread {
- public MiniClusterShutdownMonitor() {
+ MiniClusterShutdownMonitor() {
setDaemon(true);
}
public void run() {
- final long shutdownWait =
Long.parseLong(System.getProperty(TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT,
"10")) * 1000;
+ final long shutdownWait =
Long.parseLong(System.getProperty(TestConstants.TEST_MINICLUSTER_MONITOR_SHUTDOWN_WAIT,
"10")) * 1000;
LAST_TESTCASE_FINISHED.set(System.currentTimeMillis());
while (true) {
if (RUNNING_TESTCASES.get() == 0) {
@@ -1141,49 +721,29 @@ public abstract class XTestCase extends TestCase {
}
}
- @SuppressWarnings("deprecation")
- private JobConf createJobConfFromYarnCluster() {
- final JobConf jobConf = new JobConf();
- final JobConf jobConfYarn = new JobConf(yarnCluster.getConfig());
- for (final Entry<String, String> entry : jobConfYarn) {
- // MiniMRClientClusterFactory sets the job jar in Hadoop 2.0
causing tests to fail
- // TODO call conf.unset after moving completely to Hadoop 2.x
- if (!(entry.getKey().equals("mapreduce.job.jar") ||
entry.getKey().equals("mapred.jar"))) {
- jobConf.set(entry.getKey(), entry.getValue());
- }
- }
- return jobConf;
- }
-
/**
* Returns a jobconf preconfigured to talk with the test
cluster/minicluster.
* @return a jobconf preconfigured to talk with the test
cluster/minicluster.
*/
protected JobConf createJobConf() throws IOException {
- final JobConf jobConf;
-
if (yarnCluster != null) {
- jobConf = createJobConfFromYarnCluster();
+ return
testConfigurations.createJobConfFromYarnCluster(yarnCluster.getConfig());
} else {
- jobConf = new JobConf();
- jobConf.set("mapred.job.tracker", getJobTrackerUri());
- jobConf.set("fs.default.name", getNameNodeUri());
+ return
testConfigurations.createPristineJobConf(getJobTrackerUri(), getNameNodeUri());
}
-
- return jobConf;
}
/**
* A 'closure' used by {@link XTestCase#executeWhileJobTrackerIsShutdown}
method.
*/
- public static interface ShutdownJobTrackerExecutable {
+ public interface ShutdownJobTrackerExecutable {
/**
* Execute some code
*
* @throws Exception thrown if the executed code throws an exception.
*/
- public void execute() throws Exception;
+ void execute() throws Exception;
}
/**
@@ -1202,32 +762,26 @@ public abstract class XTestCase extends TestCase {
protected Services setupServicesForHCatalog() throws ServiceException {
final Services services = new Services();
- setupServicesForHCataLogImpl(services);
+
+ setConfigurationForHCatalogImpl(services);
+
return services;
}
- private void setupServicesForHCataLogImpl(final Services services) {
- final Configuration conf = services.getConf();
- conf.set(Services.CONF_SERVICE_EXT_CLASSES,
- JMSAccessorService.class.getName() + "," +
- PartitionDependencyManagerService.class.getName() + "," +
- HCatAccessorService.class.getName());
- conf.set(HCatAccessorService.JMS_CONNECTIONS_PROPERTIES,
- "default=java.naming.factory.initial#" + ActiveMQConnFactory +
";" +
- "java.naming.provider.url#" + localActiveMQBroker +
- "connectionFactoryNames#" + "ConnectionFactory");
- conf.set(URIHandlerService.URI_HANDLERS,
- FSURIHandler.class.getName() + "," +
HCatURIHandler.class.getName());
+ private void setConfigurationForHCatalogImpl(final Services services) {
+ testConfigurations.setConfigurationForHCatalog(services);
+
setSystemProperty("java.naming.factory.initial",
"org.apache.activemq.jndi.ActiveMQInitialContextFactory");
setSystemProperty("java.naming.provider.url",
"vm://localhost?broker.persistent=false");
}
protected Services setupServicesForHCatalog(final Services services)
throws ServiceException {
- setupServicesForHCataLogImpl(services);
+ setConfigurationForHCatalogImpl(services);
+
return services;
}
- protected YarnApplicationState waitUntilYarnAppState(final String
externalId, final EnumSet<YarnApplicationState> acceptedStates)
+ private YarnApplicationState waitUntilYarnAppState(final String
externalId, final EnumSet<YarnApplicationState> acceptedStates)
throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
final MutableObject<YarnApplicationState> finalState = new
MutableObject<YarnApplicationState>();
@@ -1265,7 +819,7 @@ public abstract class XTestCase extends TestCase {
assertEquals("YARN App state", YarnApplicationState.KILLED, state);
}
- protected YarnApplicationState getYarnApplicationState(String externalId)
throws HadoopAccessorException, IOException, YarnException {
+ protected YarnApplicationState getYarnApplicationState(final String
externalId) throws HadoopAccessorException, IOException, YarnException {
final ApplicationId appId = ConverterUtils.toApplicationId(externalId);
YarnApplicationState state = null;
final JobConf jobConf =
Services.get().get(HadoopAccessorService.class).createJobConf(getJobTrackerUri());
@@ -1305,9 +859,7 @@ public abstract class XTestCase extends TestCase {
}
}
- public TestLogAppender getTestLogAppender() {
+ protected TestLogAppender getTestLogAppender() {
return new TestLogAppender();
}
-
-}
-
+}
\ No newline at end of file