Author: stevel
Date: Mon Dec 7 20:24:32 2009
New Revision: 888123
URL: http://svn.apache.org/viewvc?rev=888123&view=rev
Log:
HADOOP-6194: Service lifecycle
Added:
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/
- copied from r888120, hadoop/common/trunk/src/contrib/cloud/
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/README.txt
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/README.txt
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/
- copied from r888120, hadoop/common/trunk/src/contrib/cloud/src/
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/integration-test/
- copied from r888120,
hadoop/common/trunk/src/contrib/cloud/src/integration-test/
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/integration-test/create-ebs-snapshot.sh
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/integration-test/create-ebs-snapshot.sh
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/integration-test/ebs-storage-spec.json
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/integration-test/ebs-storage-spec.json
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/integration-test/persistent-cluster.sh
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/integration-test/persistent-cluster.sh
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/integration-test/transient-cluster.sh
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/integration-test/transient-cluster.sh
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/
- copied from r888120, hadoop/common/trunk/src/contrib/cloud/src/py/
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/VERSION
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/VERSION
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop/
- copied from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop/
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop-ec2
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop-ec2
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop-ec2-init-remote.sh
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop-ec2-init-remote.sh
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop/__init__.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop/__init__.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop/cloud/
- copied from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop/cloud/
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop/cloud/__init__.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop/cloud/__init__.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop/cloud/cli.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop/cloud/cli.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop/cloud/cluster.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop/cloud/cluster.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop/cloud/commands.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop/cloud/commands.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop/cloud/providers/
- copied from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop/cloud/providers/
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop/cloud/providers/__init__.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop/cloud/providers/__init__.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop/cloud/providers/dummy.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop/cloud/providers/dummy.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop/cloud/providers/ec2.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop/cloud/providers/ec2.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop/cloud/storage.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop/cloud/storage.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/py/hadoop/cloud/util.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/py/hadoop/cloud/util.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/test/
- copied from r888120, hadoop/common/trunk/src/contrib/cloud/src/test/
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/test/hadoop/
- copied from r888120,
hadoop/common/trunk/src/contrib/cloud/src/test/hadoop/
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/test/hadoop/__init__.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/test/hadoop/__init__.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/test/hadoop/cloud/
- copied from r888120,
hadoop/common/trunk/src/contrib/cloud/src/test/hadoop/cloud/
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/test/hadoop/cloud/__init__.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/test/hadoop/cloud/__init__.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/test/hadoop/cloud/alltests.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/test/hadoop/cloud/alltests.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/test/hadoop/cloud/testcluster.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/test/hadoop/cloud/testcluster.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/test/hadoop/cloud/teststorage.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/test/hadoop/cloud/teststorage.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/test/hadoop/cloud/testuserdata.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/test/hadoop/cloud/testuserdata.py
hadoop/common/branches/HADOOP-6194/src/contrib/cloud/src/test/hadoop/cloud/testutil.py
- copied unchanged from r888120,
hadoop/common/trunk/src/contrib/cloud/src/test/hadoop/cloud/testutil.py
Removed:
hadoop/common/branches/HADOOP-6194/src/test/hadoop-site.xml
Modified:
hadoop/common/branches/HADOOP-6194/ (props changed)
hadoop/common/branches/HADOOP-6194/.eclipse.templates/.classpath
hadoop/common/branches/HADOOP-6194/CHANGES.txt (contents, props changed)
hadoop/common/branches/HADOOP-6194/build.xml
hadoop/common/branches/HADOOP-6194/src/contrib/ec2/ (props changed)
hadoop/common/branches/HADOOP-6194/src/docs/ (props changed)
hadoop/common/branches/HADOOP-6194/src/java/ (props changed)
hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/permission/FsPermission.java
hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/SequenceFile.java
hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java
hadoop/common/branches/HADOOP-6194/src/test/core/ (props changed)
hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/permission/TestFsPermission.java
hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/TestSequenceFileSync.java
Propchange: hadoop/common/branches/HADOOP-6194/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Dec 7 20:24:32 2009
@@ -1,2 +1,2 @@
-/hadoop/common/trunk:804966-885782
+/hadoop/common/trunk:804966-888120
/hadoop/core/branches/branch-0.19/core:713112
Modified: hadoop/common/branches/HADOOP-6194/.eclipse.templates/.classpath
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/.eclipse.templates/.classpath?rev=888123&r1=888122&r2=888123&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/.eclipse.templates/.classpath (original)
+++ hadoop/common/branches/HADOOP-6194/.eclipse.templates/.classpath Mon Dec 7
20:24:32 2009
@@ -10,8 +10,8 @@
<classpathentry kind="lib"
path="build/ivy/lib/Hadoop-Core/common/commons-codec-1.3.jar"/>
<classpathentry kind="lib"
path="build/ivy/lib/Hadoop-Core/common/commons-el-1.0.jar"/>
<classpathentry kind="lib"
path="build/ivy/lib/Hadoop-Core/common/commons-httpclient-3.0.1.jar"/>
- <classpathentry kind="lib"
path="build/ivy/lib/Hadoop-Core/common/commons-logging-1.0.4.jar"/>
- <classpathentry kind="lib"
path="build/ivy/lib/Hadoop-Core/common/commons-logging-api-1.0.4.jar"/>
+ <classpathentry kind="lib"
path="build/ivy/lib/Hadoop-Core/common/commons-logging-1.1.1.jar"/>
+ <classpathentry kind="lib"
path="build/ivy/lib/Hadoop-Core/common/commons-logging-api-1.1.jar"/>
<classpathentry kind="lib"
path="build/ivy/lib/Hadoop-Core/common/commons-net-1.4.1.jar"/>
<classpathentry kind="lib"
path="build/ivy/lib/Hadoop-Core/common/core-3.1.1.jar"/>
<classpathentry kind="lib"
path="build/ivy/lib/Hadoop-Core/common/jackson-core-asl-1.0.1.jar"/>
Modified: hadoop/common/branches/HADOOP-6194/CHANGES.txt
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/CHANGES.txt?rev=888123&r1=888122&r2=888123&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/CHANGES.txt (original)
+++ hadoop/common/branches/HADOOP-6194/CHANGES.txt Mon Dec 7 20:24:32 2009
@@ -14,6 +14,8 @@
and the init of the class is made to take a Configuration argument.
(Jakob Homan via ddas)
+ HADOOP-6108. Add support for EBS storage on EC2. (tomwhite)
+
IMPROVEMENTS
HADOOP-6283. Improve the exception messages thrown by
@@ -36,6 +38,8 @@
HADOOP-6366. Reduce ivy console output to ovservable level (cos)
+ HADOOP-6400. Log errors getting Unix UGI. (Todd Lipcon via tomwhite)
+
OPTIMIZATIONS
BUG FIXES
@@ -50,6 +54,11 @@
HADOOP-6398. Build is broken after HADOOP-6395 patch has been applied (cos)
+ HADOOP-6405. Update Eclipse configuration to match changes to Ivy
+ configuration (Edwin Chan via cos)
+
+ HADOOP-6411. Remove deprecated file src/test/hadoop-site.xml. (cos)
+
Release 0.21.0 - Unreleased
INCOMPATIBLE CHANGES
@@ -138,6 +147,9 @@
HADOOP-6303. Eclipse .classpath template has outdated jar files and is
missing some new ones. (cos)
+ HADOOP-6396. Fix uninformative exception message when unable to parse
+ umask. (jghoman)
+
NEW FEATURES
HADOOP-4268. Change fsck to use ClientProtocol methods so that the
@@ -682,6 +694,9 @@
HADOOP-6261. Add URI based tests for FileContext.
(Ravi Pulari via suresh).
+ HADOOP-6307. Add a new SequenceFile.Reader constructor in order to support
+ reading on un-closed file. (szetszwo)
+
BUG FIXES
HADOOP-5379. CBZip2InputStream to throw IOException on data crc error.
Propchange: hadoop/common/branches/HADOOP-6194/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Dec 7 20:24:32 2009
@@ -1,4 +1,4 @@
-/hadoop/common/trunk/CHANGES.txt:804966-885782
+/hadoop/common/trunk/CHANGES.txt:804966-888120
/hadoop/core/branches/branch-0.18/CHANGES.txt:727226
/hadoop/core/branches/branch-0.19/CHANGES.txt:713112
/hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278
Modified: hadoop/common/branches/HADOOP-6194/build.xml
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/build.xml?rev=888123&r1=888122&r2=888123&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-6194/build.xml (original)
+++ hadoop/common/branches/HADOOP-6194/build.xml Mon Dec 7 20:24:32 2009
@@ -1200,6 +1200,7 @@
<exclude name="**/native/*"/>
<exclude name="**/native/config/*"/>
<exclude name="**/VERSION"/>
+ <exclude name="**/*.json"/>
<exclude name="**/hod/*.txt"/>
</fileset>
</rat:report>
Propchange: hadoop/common/branches/HADOOP-6194/src/contrib/ec2/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Dec 7 20:24:32 2009
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/src/contrib/ec2:804966-885782
+/hadoop/common/trunk/src/contrib/ec2:804966-888120
/hadoop/core/branches/branch-0.19/core/src/contrib/ec2:713112
/hadoop/core/trunk/src/contrib/ec2:776175-784663
Propchange: hadoop/common/branches/HADOOP-6194/src/docs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Dec 7 20:24:32 2009
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/src/docs:804966-885782
+/hadoop/common/trunk/src/docs:804966-888120
/hadoop/core/branches/HADOOP-4687/core/src/docs:776175-786719
/hadoop/core/branches/branch-0.19/src/docs:713112
Propchange: hadoop/common/branches/HADOOP-6194/src/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Dec 7 20:24:32 2009
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/src/java:804966-885782
+/hadoop/common/trunk/src/java:804966-888120
/hadoop/core/branches/branch-0.19/core/src/java:713112
/hadoop/core/trunk/src/core:776175-785643,785929-786278
Modified:
hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/permission/FsPermission.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/permission/FsPermission.java?rev=888123&r1=888122&r2=888123&view=diff
==============================================================================
---
hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/permission/FsPermission.java
(original)
+++
hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/fs/permission/FsPermission.java
Mon Dec 7 20:24:32 2009
@@ -21,8 +21,6 @@
import java.io.DataOutput;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.io.Writable;
@@ -33,8 +31,6 @@
* A class for file/directory permissions.
*/
public class FsPermission implements Writable {
- private static final Log LOG = LogFactory.getLog(FsPermission.class);
-
static final WritableFactory FACTORY = new WritableFactory() {
public Writable newInstance() { return new FsPermission(); }
};
@@ -182,7 +178,8 @@
otheraction.and(umask.otheraction.not()));
}
- /** umask property label Deprecated key may be removed in version .23 */
+ /** umask property label deprecated key and code in getUMask method
+ * to accommodate it may be removed in version .23 */
public static final String DEPRECATED_UMASK_LABEL = "dfs.umask";
public static final String UMASK_LABEL =
CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY;
@@ -198,10 +195,19 @@
if(conf != null) {
String confUmask = conf.get(UMASK_LABEL);
if(confUmask != null) { // UMASK_LABEL is set
- if(conf.deprecatedKeyWasSet(DEPRECATED_UMASK_LABEL))
- umask = Integer.parseInt(confUmask); // Evaluate as decimal value
- else
- umask = new UmaskParser(confUmask).getUMask();
+ try {
+ if(conf.deprecatedKeyWasSet(DEPRECATED_UMASK_LABEL))
+ umask = Integer.parseInt(confUmask); // Evaluate as decimal value
+ else
+ umask = new UmaskParser(confUmask).getUMask();
+ } catch(IllegalArgumentException iae) {
+ // Provide more explanation for user-facing message
+ String type = iae instanceof NumberFormatException ? "decimal"
+ : "octal or symbolic";
+
+ throw new IllegalArgumentException("Unable to parse " + confUmask +
+ " as " + type + " umask.");
+ }
}
}
Modified:
hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/SequenceFile.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/SequenceFile.java?rev=888123&r1=888122&r2=888123&view=diff
==============================================================================
---
hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/SequenceFile.java
(original)
+++
hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/io/SequenceFile.java
Mon Dec 7 20:24:32 2009
@@ -1435,32 +1435,71 @@
private DeserializerBase keyDeserializer;
private DeserializerBase valDeserializer;
- /** Open the named file. */
+ /**
+ * Construct a reader by opening a file from the given file system.
+ * @param fs The file system used to open the file.
+ * @param file The file being read.
+ * @param conf Configuration
+ * @throws IOException
+ */
public Reader(FileSystem fs, Path file, Configuration conf)
throws IOException {
this(fs, file, conf.getInt("io.file.buffer.size", 4096), conf, false);
}
+ /**
+ * Construct a reader by the given input stream.
+ * @param in An input stream.
+ * @param buffersize The buffer size used to read the file.
+ * @param start The starting position.
+ * @param length The length being read.
+ * @param conf Configuration
+ * @throws IOException
+ */
+ public Reader(FSDataInputStream in, int buffersize,
+ long start, long length, Configuration conf) throws IOException {
+ this(null, null, in, buffersize, start, length, conf, false);
+ }
+
private Reader(FileSystem fs, Path file, int bufferSize,
Configuration conf, boolean tempReader) throws IOException {
- this(fs, file, bufferSize, 0, fs.getFileStatus(file).getLen(), conf,
tempReader);
+ this(fs, file, null, bufferSize, 0, fs.getFileStatus(file).getLen(),
+ conf, tempReader);
}
-
- private Reader(FileSystem fs, Path file, int bufferSize, long start,
- long length, Configuration conf, boolean tempReader)
- throws IOException {
+
+ /**
+ * Private constructor.
+ * @param fs The file system used to open the file.
+ * It is not used if the given input stream is not null.
+ * @param file The file being read.
+ * @param in An input stream of the file. If it is null,
+ * the file will be opened from the given file system.
+ * @param bufferSize The buffer size used to read the file.
+ * @param start The starting position.
+ * @param length The length being read.
+ * @param conf Configuration
+ * @param tempReader Is this temporary?
+ * @throws IOException
+ */
+ private Reader(FileSystem fs, Path file, FSDataInputStream in,
+ int bufferSize, long start, long length, Configuration conf,
+ boolean tempReader) throws IOException {
+ if (fs == null && in == null) {
+ throw new IllegalArgumentException("fs == null && in == null");
+ }
+
this.file = file;
- this.in = openFile(fs, file, bufferSize, length);
+ this.in = in != null? in: openFile(fs, file, bufferSize, length);
this.conf = conf;
boolean succeeded = false;
try {
seek(start);
- this.end = in.getPos() + length;
+ this.end = this.in.getPos() + length;
init(tempReader);
succeeded = true;
} finally {
if (!succeeded) {
- IOUtils.cleanup(LOG, in);
+ IOUtils.cleanup(LOG, this.in);
}
}
}
@@ -1468,6 +1507,13 @@
/**
* Override this method to specialize the type of
* {@link FSDataInputStream} returned.
+ * @param fs The file system used to open the file.
+ * @param file The file being read.
+ * @param bufferSize The buffer size used to read the file.
+ * @param length The length being read if it is >= 0. Otherwise,
+ * the length is not available.
+ * @return The opened stream.
+ * @throws IOException
*/
protected FSDataInputStream openFile(FileSystem fs, Path file,
int bufferSize, long length) throws IOException {
@@ -1489,7 +1535,7 @@
if ((versionBlock[0] != VERSION[0]) ||
(versionBlock[1] != VERSION[1]) ||
(versionBlock[2] != VERSION[2]))
- throw new IOException(file + " not a SequenceFile");
+ throw new IOException(this + " not a SequenceFile");
// Set 'version'
version = versionBlock[3];
@@ -2251,7 +2297,7 @@
/** Returns the name of the file. */
public String toString() {
- return file.toString();
+ return file == null? "<unknown>": file.toString();
}
}
@@ -3132,7 +3178,7 @@
if (fs.getUri().getScheme().startsWith("ramfs")) {
bufferSize = conf.getInt("io.bytes.per.checksum", 512);
}
- Reader reader = new Reader(fs, segmentPathName,
+ Reader reader = new Reader(fs, segmentPathName, null,
bufferSize, segmentOffset,
segmentLength, conf, false);
Modified:
hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java?rev=888123&r1=888122&r2=888123&view=diff
==============================================================================
---
hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java
(original)
+++
hadoop/common/branches/HADOOP-6194/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java
Mon Dec 7 20:24:32 2009
@@ -244,6 +244,7 @@
try {
userName = getUnixUserName();
} catch (Exception e) {
+ LOG.warn("Couldn't get unix username, using " + DEFAULT_USERNAME, e);
userName = DEFAULT_USERNAME;
}
@@ -263,6 +264,7 @@
try {
groupNames = getUnixGroups();
} catch (Exception e) {
+ LOG.warn("Couldn't get unix groups, using " + DEFAULT_GROUP, e);
groupNames = new String[1];
groupNames[0] = DEFAULT_GROUP;
}
Propchange: hadoop/common/branches/HADOOP-6194/src/test/core/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Dec 7 20:24:32 2009
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/src/test/core:804966-885782
+/hadoop/common/trunk/src/test/core:804966-888120
/hadoop/core/branches/branch-0.19/core/src/test/core:713112
/hadoop/core/trunk/src/test/core:776175-785643,785929-786278
Modified:
hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/permission/TestFsPermission.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/permission/TestFsPermission.java?rev=888123&r1=888122&r2=888123&view=diff
==============================================================================
---
hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/permission/TestFsPermission.java
(original)
+++
hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/fs/permission/TestFsPermission.java
Mon Dec 7 20:24:32 2009
@@ -157,11 +157,18 @@
FsPermission.getUMask(conf);
fail("Shouldn't have been able to parse bad umask");
} catch(IllegalArgumentException iae) {
- assertEquals(iae.getMessage(), b);
+ assertTrue("Exception should specify parsing error and invalid umask: "
+ + iae.getMessage(), isCorrectExceptionMessage(iae.getMessage(), b));
}
}
}
+ private boolean isCorrectExceptionMessage(String msg, String umask) {
+ return msg.contains("Unable to parse") &&
+ msg.contains(umask) &&
+ msg.contains("octal or symbolic");
+ }
+
// Ensure that when the deprecated decimal umask key is used, it is correctly
// parsed as such and converted correctly to an FsPermission value
public void testDeprecatedUmask() {
Modified:
hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/TestSequenceFileSync.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/TestSequenceFileSync.java?rev=888123&r1=888122&r2=888123&view=diff
==============================================================================
---
hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/TestSequenceFileSync.java
(original)
+++
hadoop/common/branches/HADOOP-6194/src/test/core/org/apache/hadoop/io/TestSequenceFileSync.java
Mon Dec 7 20:24:32 2009
@@ -18,22 +18,17 @@
package org.apache.hadoop.io;
-import java.io.File;
+import static org.junit.Assert.assertEquals;
+
import java.io.IOException;
import java.util.Random;
-import java.net.URI;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.junit.Before;
import org.junit.Test;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import static org.junit.Assert.*;
-
public class TestSequenceFileSync {
private static final int NUMRECORDS = 2000;
private static final int RECORDSIZE = 80;
@@ -66,8 +61,18 @@
try {
writeSequenceFile(writer, NUMRECORDS);
for (int i = 0; i < 5 ; i++) {
- final SequenceFile.Reader reader =
- new SequenceFile.Reader(fs, path, conf);
+ final SequenceFile.Reader reader;
+
+ //try different SequenceFile.Reader constructors
+ if (i % 2 == 0) {
+ reader = new SequenceFile.Reader(fs, path, conf);
+ } else {
+ final FSDataInputStream in = fs.open(path);
+ final long length = fs.getFileStatus(path).getLen();
+ final int buffersize = conf.getInt("io.file.buffer.size", 4096);
+ reader = new SequenceFile.Reader(in, buffersize, 0L, length, conf);
+ }
+
try {
forOffset(reader, input, val, i, 0, 0);
forOffset(reader, input, val, i, 65, 0);