Author: tucu
Date: Mon Mar 5 23:19:01 2012
New Revision: 1297282
URL: http://svn.apache.org/viewvc?rev=1297282&view=rev
Log:
OOZIE-734 Simplify Kerberos/HadoopAccessorService code and remove Kerberos/DoAs
code (tucu)
Removed:
incubator/oozie/trunk/core/src/main/java/org/apache/oozie/action/hadoop/AuthHelper.java
incubator/oozie/trunk/core/src/main/java/org/apache/oozie/action/hadoop/DoAs.java
incubator/oozie/trunk/core/src/main/java/org/apache/oozie/action/hadoop/KerberosAuthHelper.java
incubator/oozie/trunk/core/src/main/java/org/apache/oozie/action/hadoop/KerberosDoAs.java
incubator/oozie/trunk/core/src/main/java/org/apache/oozie/service/KerberosHadoopAccessorService.java
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/service/TestKerberosHadoopAccessorService.java
Modified:
incubator/oozie/trunk/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
incubator/oozie/trunk/core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java
incubator/oozie/trunk/core/src/main/resources/oozie-default.xml
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/MainTestCase.java
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/PigTestCase.java
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/ShellTestCase.java
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
incubator/oozie/trunk/docs/src/site/twiki/AG_Install.twiki
incubator/oozie/trunk/docs/src/site/twiki/ENG_Building.twiki
incubator/oozie/trunk/examples/src/test/java/org/apache/oozie/example/TestLocalOozieExample.java
incubator/oozie/trunk/release-log.txt
Modified:
incubator/oozie/trunk/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
URL:
http://svn.apache.org/viewvc/incubator/oozie/trunk/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java?rev=1297282&r1=1297281&r2=1297282&view=diff
==============================================================================
---
incubator/oozie/trunk/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
(original)
+++
incubator/oozie/trunk/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
Mon Mar 5 23:19:01 2012
@@ -43,10 +43,12 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AccessControlException;
+import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.RunningJob;
+import
org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.util.DiskChecker;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
@@ -648,7 +650,9 @@ public class JavaActionExecutor extends
XLog.getLog(getClass()).debug("Submitting the job through Job
Client for action " + action.getId());
// setting up propagation of the delegation token.
- AuthHelper.get().set(jobClient, launcherJobConf);
+ Token<DelegationTokenIdentifier> mrdt =
jobClient.getDelegationToken(new Text("mr token"));
+ launcherJobConf.getCredentials().addToken(new Text("mr
token"), mrdt);
+
log.debug(WorkflowAppService.HADOOP_JT_KERBEROS_NAME + " = "
+
launcherJobConf.get(WorkflowAppService.HADOOP_JT_KERBEROS_NAME));
log.debug(WorkflowAppService.HADOOP_NN_KERBEROS_NAME + " = "
Modified:
incubator/oozie/trunk/core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java
URL:
http://svn.apache.org/viewvc/incubator/oozie/trunk/core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java?rev=1297282&r1=1297281&r2=1297282&view=diff
==============================================================================
---
incubator/oozie/trunk/core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java
(original)
+++
incubator/oozie/trunk/core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java
Mon Mar 5 23:19:01 2012
@@ -17,13 +17,16 @@
*/
package org.apache.oozie.service;
+import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;
+import
org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.filecache.DistributedCache;
+import org.apache.hadoop.security.token.Token;
import org.apache.oozie.ErrorCode;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.XConfiguration;
@@ -35,6 +38,8 @@ import java.net.URISyntaxException;
import java.security.PrivilegedExceptionAction;
import java.util.Set;
import java.util.HashSet;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
/**
* The HadoopAccessorService returns HadoopAccessor instances configured to
work on behalf of a user-group. <p/> The
@@ -47,12 +52,22 @@ public class HadoopAccessorService imple
public static final String CONF_PREFIX = Service.CONF_PREFIX +
"HadoopAccessorService.";
public static final String JOB_TRACKER_WHITELIST = CONF_PREFIX +
"jobTracker.whitelist";
public static final String NAME_NODE_WHITELIST = CONF_PREFIX +
"nameNode.whitelist";
+ public static final String KERBEROS_AUTH_ENABLED = CONF_PREFIX +
"kerberos.enabled";
+ public static final String KERBEROS_KEYTAB = CONF_PREFIX + "keytab.file";
+ public static final String KERBEROS_PRINCIPAL = CONF_PREFIX +
"kerberos.principal";
private Set<String> jobTrackerWhitelist = new HashSet<String>();
private Set<String> nameNodeWhitelist = new HashSet<String>();
+ private ConcurrentMap<String, UserGroupInformation> userUgiMap;
+ private String localRealm;
+
public void init(Services services) throws ServiceException {
- for (String name :
services.getConf().getStringCollection(JOB_TRACKER_WHITELIST)) {
+ init(services.getConf());
+ }
+
+ public void init(Configuration conf) throws ServiceException {
+ for (String name : conf.getStringCollection(JOB_TRACKER_WHITELIST)) {
String tmp = name.toLowerCase().trim();
if (tmp.length() == 0) {
continue;
@@ -60,9 +75,9 @@ public class HadoopAccessorService imple
jobTrackerWhitelist.add(tmp);
}
XLog.getLog(getClass()).info(
- "JOB_TRACKER_WHITELIST :" +
services.getConf().getStringCollection(JOB_TRACKER_WHITELIST)
+ "JOB_TRACKER_WHITELIST :" +
conf.getStringCollection(JOB_TRACKER_WHITELIST)
+ ", Total entries :" + jobTrackerWhitelist.size());
- for (String name :
services.getConf().getStringCollection(NAME_NODE_WHITELIST)) {
+ for (String name : conf.getStringCollection(NAME_NODE_WHITELIST)) {
String tmp = name.toLowerCase().trim();
if (tmp.length() == 0) {
continue;
@@ -70,12 +85,48 @@ public class HadoopAccessorService imple
nameNodeWhitelist.add(tmp);
}
XLog.getLog(getClass()).info(
- "NAME_NODE_WHITELIST :" +
services.getConf().getStringCollection(NAME_NODE_WHITELIST)
+ "NAME_NODE_WHITELIST :" +
conf.getStringCollection(NAME_NODE_WHITELIST)
+ ", Total entries :" + nameNodeWhitelist.size());
- init(services.getConf());
+
+ boolean kerberosAuthOn = conf.getBoolean(KERBEROS_AUTH_ENABLED, true);
+ XLog.getLog(getClass()).info("Oozie Kerberos Authentication [{0}]",
(kerberosAuthOn) ? "enabled" : "disabled");
+ if (kerberosAuthOn) {
+ kerberosInit(conf);
+ }
+ else {
+ Configuration ugiConf = new Configuration();
+ ugiConf.set("hadoop.security.authentication", "simple");
+ UserGroupInformation.setConfiguration(ugiConf);
+ }
+ localRealm = conf.get("local.realm");
+
+ userUgiMap = new ConcurrentHashMap<String, UserGroupInformation>();
}
- public void init(Configuration serviceConf) throws ServiceException {
+ private void kerberosInit(Configuration serviceConf) throws
ServiceException {
+ try {
+ String keytabFile = serviceConf.get(KERBEROS_KEYTAB,
+
System.getProperty("user.home") + "/oozie.keytab").trim();
+ if (keytabFile.length() == 0) {
+ throw new ServiceException(ErrorCode.E0026,
KERBEROS_KEYTAB);
+ }
+ String principal = serviceConf.get(KERBEROS_PRINCIPAL,
"oozie/localhost@LOCALHOST");
+ if (principal.length() == 0) {
+ throw new ServiceException(ErrorCode.E0026,
KERBEROS_PRINCIPAL);
+ }
+ Configuration conf = new Configuration();
+ conf.set("hadoop.security.authentication", "kerberos");
+ UserGroupInformation.setConfiguration(conf);
+ UserGroupInformation.loginUserFromKeytab(principal,
keytabFile);
+            XLog.getLog(getClass()).info("Got Kerberos ticket, keytab
[{0}], Oozie principal [{1}]",
+                                         keytabFile, principal);
+ }
+ catch (ServiceException ex) {
+ throw ex;
+ }
+ catch (Exception ex) {
+ throw new ServiceException(ErrorCode.E0100,
getClass().getName(), ex.getMessage(), ex);
+ }
}
public void destroy() {
@@ -85,6 +136,16 @@ public class HadoopAccessorService imple
return HadoopAccessorService.class;
}
+ private UserGroupInformation getUGI(String user) throws IOException {
+ UserGroupInformation ugi = userUgiMap.get(user);
+ if (ugi == null) {
+ // taking care of a race condition, the latest UGI will be
discarded
+ ugi = UserGroupInformation.createProxyUser(user,
UserGroupInformation.getLoginUser());
+ userUgiMap.putIfAbsent(user, ugi);
+ }
+ return ugi;
+ }
+
/**
* Return a JobClient created with the provided user/group.
*
@@ -93,58 +154,92 @@ public class HadoopAccessorService imple
* @return JobClient created with the provided user/group.
* @throws HadoopAccessorException if the client could not be created.
*/
- public JobClient createJobClient(String user, String group, JobConf conf)
throws HadoopAccessorException {
+ public JobClient createJobClient(String user, String group, final JobConf
conf) throws HadoopAccessorException {
+ ParamChecker.notEmpty(user, "user");
validateJobTracker(conf.get("mapred.job.tracker"));
- conf = createConfiguration(user, group, conf);
try {
- return new JobClient(conf);
+ UserGroupInformation ugi = getUGI(user);
+ JobClient jobClient = ugi.doAs(new
PrivilegedExceptionAction<JobClient>() {
+ public JobClient run() throws Exception {
+ conf.set("mapreduce.framework.name", "yarn");
+ return new JobClient(conf);
+ }
+ });
+ Token<DelegationTokenIdentifier> mrdt =
jobClient.getDelegationToken(new Text("mr token"));
+ conf.getCredentials().addToken(new Text("mr token"), mrdt);
+ return jobClient;
}
- catch (IOException e) {
- throw new HadoopAccessorException(ErrorCode.E0902, e);
+ catch (InterruptedException ex) {
+ throw new HadoopAccessorException(ErrorCode.E0902, ex);
+ }
+ catch (IOException ex) {
+ throw new HadoopAccessorException(ErrorCode.E0902, ex);
}
}
/**
* Return a FileSystem created with the provided user/group.
- *
- * @param conf Configuration with all necessary information to create the
- * FileSystem.
+ *
+ * @param conf Configuration with all necessary information to create the
FileSystem.
* @return FileSystem created with the provided user/group.
* @throws HadoopAccessorException if the filesystem could not be created.
*/
- public FileSystem createFileSystem(String user, String group,
Configuration conf) throws HadoopAccessorException {
+ public FileSystem createFileSystem(String user, String group, final
Configuration conf)
+ throws HadoopAccessorException {
+ ParamChecker.notEmpty(user, "user");
try {
validateNameNode(new
URI(conf.get("fs.default.name")).getAuthority());
- conf = createConfiguration(user, group, conf);
- return FileSystem.get(conf);
+ UserGroupInformation ugi = getUGI(user);
+ return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+ public FileSystem run() throws Exception {
+ Configuration defaultConf = new Configuration();
+ XConfiguration.copy(conf, defaultConf);
+ return FileSystem.get(defaultConf);
+ }
+ });
+ }
+ catch (InterruptedException ex) {
+ throw new HadoopAccessorException(ErrorCode.E0902, ex);
}
- catch (IOException e) {
- throw new HadoopAccessorException(ErrorCode.E0902, e);
+ catch (IOException ex) {
+ throw new HadoopAccessorException(ErrorCode.E0902, ex);
}
- catch (URISyntaxException e) {
- throw new HadoopAccessorException(ErrorCode.E0902, e);
+ catch (URISyntaxException ex) {
+ throw new HadoopAccessorException(ErrorCode.E0902, ex);
}
}
/**
- * Return a FileSystem created with the provided user/group for the
- * specified URI.
- *
+ * Return a FileSystem created with the provided user/group for the
specified URI.
+ *
* @param uri file system URI.
- * @param conf Configuration with all necessary information to create the
- * FileSystem.
+ * @param conf Configuration with all necessary information to create the
FileSystem.
* @return FileSystem created with the provided user/group.
* @throws HadoopAccessorException if the filesystem could not be created.
*/
- public FileSystem createFileSystem(String user, String group, URI uri,
Configuration conf)
+ public FileSystem createFileSystem(String user, String group, final URI
uri, final Configuration conf)
throws HadoopAccessorException {
+ ParamChecker.notEmpty(user, "user");
validateNameNode(uri.getAuthority());
- conf = createConfiguration(user, group, conf);
try {
- return FileSystem.get(uri, conf);
+ UserGroupInformation ugi = getUGI(user);
+ return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+ public FileSystem run() throws Exception {
+ Configuration defaultConf = new Configuration();
+
+
defaultConf.set(WorkflowAppService.HADOOP_JT_KERBEROS_NAME, "mapred/_HOST@" +
localRealm);
+
defaultConf.set(WorkflowAppService.HADOOP_NN_KERBEROS_NAME, "hdfs/_HOST@" +
localRealm);
+
+ XConfiguration.copy(conf, defaultConf);
+ return FileSystem.get(uri, defaultConf);
+ }
+ });
}
- catch (IOException e) {
- throw new HadoopAccessorException(ErrorCode.E0902, e);
+ catch (InterruptedException ex) {
+ throw new HadoopAccessorException(ErrorCode.E0902, ex);
+ }
+ catch (IOException ex) {
+ throw new HadoopAccessorException(ErrorCode.E0902, ex);
}
}
@@ -175,23 +270,28 @@ public class HadoopAccessorService imple
}
}
- @SuppressWarnings("unchecked")
- private <C extends Configuration> C createConfiguration(String user,
String group, C conf) {
- ParamChecker.notEmpty(user, "user");
- C fsConf = (C) ((conf instanceof JobConf) ? new JobConf() : new
Configuration());
- XConfiguration.copy(conf, fsConf);
- fsConf.set("user.name", user);
- return fsConf;
- }
-
- /**
- * Add a file to the ClassPath via the DistributedCache.
- */
public void addFileToClassPath(String user, String group, final Path file,
final Configuration conf)
throws IOException {
- Configuration defaultConf = createConfiguration(user, group, conf);
- DistributedCache.addFileToClassPath(file, defaultConf);
- DistributedCache.addFileToClassPath(file, conf);
+ ParamChecker.notEmpty(user, "user");
+ try {
+ UserGroupInformation ugi = getUGI(user);
+ ugi.doAs(new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ Configuration defaultConf = new Configuration();
+ XConfiguration.copy(conf, defaultConf);
+ //Doing this NOP add first to have the FS created and
cached
+ DistributedCache.addFileToClassPath(file, defaultConf);
+
+ DistributedCache.addFileToClassPath(file, conf);
+ return null;
+ }
+ });
+
+ }
+ catch (InterruptedException ex) {
+ throw new IOException(ex);
+ }
+
}
}
Modified: incubator/oozie/trunk/core/src/main/resources/oozie-default.xml
URL:
http://svn.apache.org/viewvc/incubator/oozie/trunk/core/src/main/resources/oozie-default.xml?rev=1297282&r1=1297281&r2=1297282&view=diff
==============================================================================
--- incubator/oozie/trunk/core/src/main/resources/oozie-default.xml (original)
+++ incubator/oozie/trunk/core/src/main/resources/oozie-default.xml Mon Mar 5
23:19:01 2012
@@ -69,7 +69,7 @@
org.apache.oozie.service.UUIDService,
org.apache.oozie.service.ELService,
org.apache.oozie.service.AuthorizationService,
- org.apache.oozie.service.KerberosHadoopAccessorService,
+ org.apache.oozie.service.HadoopAccessorService,
org.apache.oozie.service.MemoryLocksService,
org.apache.oozie.service.DagXLogInfoService,
org.apache.oozie.service.SchemaService,
Modified:
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/MainTestCase.java
URL:
http://svn.apache.org/viewvc/incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/MainTestCase.java?rev=1297282&r1=1297281&r2=1297282&view=diff
==============================================================================
---
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/MainTestCase.java
(original)
+++
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/MainTestCase.java
Mon Mar 5 23:19:01 2012
@@ -17,15 +17,26 @@
*/
package org.apache.oozie.action.hadoop;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.oozie.test.XFsTestCase;
+import java.security.PrivilegedExceptionAction;
import java.util.concurrent.Callable;
public abstract class MainTestCase extends XFsTestCase implements
Callable<Void> {
- //TODO remove this trick when we compile 20.100 onwards
+ public static void execute(String user, final Callable<Void> callable)
throws Exception {
+ UserGroupInformation ugi = UserGroupInformation.createProxyUser(user,
UserGroupInformation.getLoginUser());
+ ugi.doAs(new PrivilegedExceptionAction<Void>() {
+ public Void run() throws Exception {
+ callable.call();
+ return null;
+ }
+ });
+ }
+
public void testMain() throws Exception {
- DoAs.call(getTestUser(), this);
+ execute(getTestUser(), this);
}
}
Modified:
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/PigTestCase.java
URL:
http://svn.apache.org/viewvc/incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/PigTestCase.java?rev=1297282&r1=1297281&r2=1297282&view=diff
==============================================================================
---
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/PigTestCase.java
(original)
+++
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/PigTestCase.java
Mon Mar 5 23:19:01 2012
@@ -47,8 +47,7 @@ public abstract class PigTestCase extend
public void testPigScript() throws Exception {
pigScript = commonPigScript;
writeStats = true;
- DoAs.call(getTestUser(), this);
-
+ MainTestCase.execute(getTestUser(), this);
}
// testing embedded Pig feature of Pig 0.9
@@ -84,7 +83,7 @@ public abstract class PigTestCase extend
+ "\nQ.runSingle()";
writeStats = false;
- DoAs.call(getTestUser(), this);
+ MainTestCase.execute(getTestUser(), this);
}
}
Modified:
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/ShellTestCase.java
URL:
http://svn.apache.org/viewvc/incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/ShellTestCase.java?rev=1297282&r1=1297281&r2=1297282&view=diff
==============================================================================
---
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/ShellTestCase.java
(original)
+++
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/action/hadoop/ShellTestCase.java
Mon Mar 5 23:19:01 2012
@@ -39,7 +39,7 @@ public abstract class ShellTestCase exte
scriptContent = SUCCESS_SHELL_SCRIPT_CONTENT;
scriptName = "script.sh";
expectedSuccess = true;
- DoAs.call(getTestUser(), this);
+ MainTestCase.execute(getTestUser(), this);
}
/**
@@ -51,6 +51,6 @@ public abstract class ShellTestCase exte
scriptContent = FAIL_SHELLSCRIPT_CONTENT;
scriptName = "script.sh";
expectedSuccess = false;
- DoAs.call(getTestUser(), this);
+ MainTestCase.execute(getTestUser(), this);
}
}
Modified:
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
URL:
http://svn.apache.org/viewvc/incubator/oozie/trunk/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java?rev=1297282&r1=1297281&r2=1297282&view=diff
==============================================================================
---
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
(original)
+++
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
Mon Mar 5 23:19:01 2012
@@ -28,7 +28,11 @@ public class TestHadoopAccessorService e
protected void setUp() throws Exception {
super.setUp();
- setSystemProperty(Services.CONF_SERVICE_EXT_CLASSES,
HadoopAccessorService.class.getName());
+ if (System.getProperty("oozie.test.hadoop.security",
"simple").equals("kerberos")) {
+
setSystemProperty("oozie.service.HadoopAccessorService.kerberos.enabled",
"true");
+
setSystemProperty("oozie.service.HadoopAccessorService.keytab.file",
getKeytabFile());
+
setSystemProperty("oozie.service.HadoopAccessorService.kerberos.principal",
getOoziePrincipal());
+ }
Services services = new Services();
services.init();
}
@@ -43,7 +47,6 @@ public class TestHadoopAccessorService e
HadoopAccessorService has = services.get(HadoopAccessorService.class);
assertNotNull(has);
}
-
public void testAccessor() throws Exception {
Services services = Services.get();
HadoopAccessorService has = services.get(HadoopAccessorService.class);
@@ -53,25 +56,41 @@ public class TestHadoopAccessorService e
injectKerberosInfo(conf);
URI uri = new URI(getNameNodeUri());
- String user = getTestUser() + "-invalid";
+ //valid user
+ String user = getTestUser();
String group = getTestGroup();
- try {
- has.createJobClient(null, group, conf);
- fail();
- }
- catch (IllegalArgumentException ex) {
- }
-
- user = getTestUser();
JobClient jc = has.createJobClient(user, group, conf);
assertNotNull(jc);
-
FileSystem fs = has.createFileSystem(user, group, conf);
assertNotNull(fs);
-
fs = has.createFileSystem(user, group, uri, conf);
assertNotNull(fs);
+
+ //invalid user
+
+ user = "invalid";
+
+ try {
+ has.createJobClient(user, group, conf);
+ fail();
+ }
+ catch (Throwable ex) {
+ }
+
+ try {
+ has.createFileSystem(user, group, conf);
+ fail();
+ }
+ catch (Throwable ex) {
+ }
+
+ try {
+ has.createFileSystem(user, group, uri, conf);
+ fail();
+ }
+ catch (Throwable ex) {
+ }
}
}
Modified:
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
URL:
http://svn.apache.org/viewvc/incubator/oozie/trunk/core/src/test/java/org/apache/oozie/test/XFsTestCase.java?rev=1297282&r1=1297281&r2=1297282&view=diff
==============================================================================
---
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
(original)
+++
incubator/oozie/trunk/core/src/test/java/org/apache/oozie/test/XFsTestCase.java
Mon Mar 5 23:19:01 2012
@@ -26,7 +26,6 @@ import org.apache.hadoop.fs.permission.F
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.oozie.util.XLog;
-import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.service.HadoopAccessorException;
import org.apache.oozie.service.HadoopAccessorService;
@@ -64,17 +63,8 @@ public abstract class XFsTestCase extend
conf.set("local.realm", getRealm());
injectKerberosInfo(conf);
- Class hasClass;
- //TODO change this for a hardcoded instantiation when we only compile
20.100 onwards
- if (System.getProperty("hadoop20",
"false").toLowerCase().equals("false")) {
- hasClass =
Class.forName("org.apache.oozie.service.KerberosHadoopAccessorService");
- }
- else {
- hasClass = HadoopAccessorService.class;
- }
-
- has = (HadoopAccessorService) hasClass.newInstance();
+ has = new HadoopAccessorService();
has.init(conf);
fileSystem = has.createFileSystem(getTestUser(), getTestGroup(), new
URI(getNameNodeUri()), conf);
Path path = new Path(fileSystem.getWorkingDirectory(),
getTestCaseDir().substring(1));
Modified: incubator/oozie/trunk/docs/src/site/twiki/AG_Install.twiki
URL:
http://svn.apache.org/viewvc/incubator/oozie/trunk/docs/src/site/twiki/AG_Install.twiki?rev=1297282&r1=1297281&r2=1297282&view=diff
==============================================================================
--- incubator/oozie/trunk/docs/src/site/twiki/AG_Install.twiki (original)
+++ incubator/oozie/trunk/docs/src/site/twiki/AG_Install.twiki Mon Mar 5
23:19:01 2012
@@ -268,7 +268,7 @@ It should be the same keytab file Oozie
---+++ Oozie Hadoop Authentication Configuration
-Oozie can work with Hadoop 20 with Security distribution which supports
Kerberos authentication.
+Oozie works with Hadoop versions which support Kerberos authentication.
Oozie Hadoop authentication is configured using the following configuration
properties (default values shown):
@@ -281,12 +281,6 @@ Oozie Hadoop authentication is configure
The above default values are for a Hadoop 0.20 secure distribution (with
support for Kerberos authentication).
-To use a Hadoop 20 distribution pre-security (i.e. 0.20.1) the following
property must be set:
-
-<verbatim>
- oozie.services.ext=org.apache.oozie.service.HadoopAccessorService
-</verbatim>
-
To enable Kerberos authentication, the following property must be set:
<verbatim>
@@ -306,7 +300,8 @@ as a proxy user.
---+++ User ProxyUser Configuration
-Oozie supports impersonation or proxyuser functionality (identical to Hadoop
proxyuser capabilities and conceptually similar to Unix 'sudo').
+Oozie supports impersonation or proxyuser functionality (identical to Hadoop
proxyuser capabilities and conceptually
+similar to Unix 'sudo').
Proxyuser enables other systems that are Oozie clients to submit jobs on
behalf of other users.
Modified: incubator/oozie/trunk/docs/src/site/twiki/ENG_Building.twiki
URL:
http://svn.apache.org/viewvc/incubator/oozie/trunk/docs/src/site/twiki/ENG_Building.twiki?rev=1297282&r1=1297281&r2=1297282&view=diff
==============================================================================
--- incubator/oozie/trunk/docs/src/site/twiki/ENG_Building.twiki (original)
+++ incubator/oozie/trunk/docs/src/site/twiki/ENG_Building.twiki Mon Mar 5
23:19:01 2012
@@ -119,8 +119,6 @@ undefined (Hadoop JARs are not included)
*test*= (*): runs a single test case, to run a test give the test class name
without package and extension, no default
-*hadoop20*= (*) : indicates the build/test should not include classes for
Hadoop 20S, default is 'false'
-
*oozie.test.db*= (*): indicates the database to use for running the testcases,
supported values are 'hsqldb', 'derby',
'mysql', 'postgres' and 'oracle'; default value is 'hsqldb'. For each
database there is
=core/src/test/resources/DATABASE-oozie-site.xml= file preconfigured.
Modified:
incubator/oozie/trunk/examples/src/test/java/org/apache/oozie/example/TestLocalOozieExample.java
URL:
http://svn.apache.org/viewvc/incubator/oozie/trunk/examples/src/test/java/org/apache/oozie/example/TestLocalOozieExample.java?rev=1297282&r1=1297281&r2=1297282&view=diff
==============================================================================
---
incubator/oozie/trunk/examples/src/test/java/org/apache/oozie/example/TestLocalOozieExample.java
(original)
+++
incubator/oozie/trunk/examples/src/test/java/org/apache/oozie/example/TestLocalOozieExample.java
Mon Mar 5 23:19:01 2012
@@ -18,25 +18,17 @@
package org.apache.oozie.example;
import junit.framework.TestCase;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.oozie.DagEngine;
import org.apache.oozie.service.Services;
-import org.apache.oozie.service.WorkflowAppService;
import org.apache.oozie.test.XFsTestCase;
-import org.apache.oozie.test.XTestCase;
import org.apache.oozie.util.IOUtils;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.XLog;
-import org.apache.oozie.util.XConfiguration;
-import org.apache.oozie.action.hadoop.DoAs;
-import org.apache.oozie.client.OozieClient;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
-import java.util.concurrent.Callable;
public class TestLocalOozieExample extends XFsTestCase {
private String oozieLocalLog;
Modified: incubator/oozie/trunk/release-log.txt
URL:
http://svn.apache.org/viewvc/incubator/oozie/trunk/release-log.txt?rev=1297282&r1=1297281&r2=1297282&view=diff
==============================================================================
--- incubator/oozie/trunk/release-log.txt (original)
+++ incubator/oozie/trunk/release-log.txt Mon Mar 5 23:19:01 2012
@@ -1,5 +1,6 @@
-- Oozie 3.2.0 release
+OOZIE-734 Simplify Kerberos/HadoopAccessorService code and remove
Kerberos/DoAs code (tucu)
OOZIE-631 Oozie DB create/upgrade tool (tucu)
OOZIE-729 SubmitMRCommand & SubmitMRXCommant testcases fail with Hadoop 0.23
(tucu)
OOZIE-726 Removing switch from default runtime scope to compile scope for
hadooplib.xml assembly due to it pulling in JDK tools.jar with compile scope.
(abayer via tucu)