Author: cdouglas
Date: Mon May 4 21:38:48 2009
New Revision: 771456
URL: http://svn.apache.org/viewvc?rev=771456&view=rev
Log:
HADOOP-5364. Add certificate expiration warning to HsftpFileSystem and HDFS
proxy. Contributed by Zhiyong Zhang
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=771456&r1=771455&r2=771456&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Mon May 4 21:38:48 2009
@@ -292,6 +292,9 @@
HADOOP-5217. Split AllTestDriver for core, hdfs and mapred. (sharad)
+ HADOOP-5364. Add certificate expiration warning to HsftpFileSystem and HDFS
+ proxy. (Zhiyong Zhang via cdouglas)
+
OPTIMIZATIONS
HADOOP-5595. NameNode does not need to run a replicator to choose a
Modified: hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java?rev=771456&r1=771455&r2=771456&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyUtil.java Mon May 4 21:38:48 2009
@@ -26,12 +26,12 @@
import java.net.URISyntaxException;
import java.net.URL;
import java.security.cert.X509Certificate;
+import java.util.Date;
import java.util.Set;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLSession;
-import javax.net.ssl.SSLSocketFactory;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
@@ -39,7 +39,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSInputStream;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.util.HostsFileReader;
@@ -50,9 +49,11 @@
*/
public class ProxyUtil {
public static final Log LOG = LogFactory.getLog(ProxyUtil.class);
+ private static final long MM_SECONDS_PER_DAY = 1000 * 60 * 60 * 24;
+  private static final int CERT_EXPIRATION_WARNING_THRESHOLD = 30; // 30 days warning
private static enum UtilityOption {
- RELOAD("-reloadPermFiles"), CLEAR("-clearUgiCache"), GET("-get");
+    RELOAD("-reloadPermFiles"), CLEAR("-clearUgiCache"), GET("-get"), CHECKCERTS("-checkcerts");
private String name = null;
@@ -136,7 +137,7 @@
sb.append("\n Client certificate Subject Name is "
+ cert.getSubjectX500Principal().getName());
} else {
- sb.append("\n No Client certs was found");
+ sb.append("\n No client certificates were found");
}
X509Certificate[] serverCerts = (X509Certificate[]) connection.getServerCertificates();
if (serverCerts != null) {
@@ -144,7 +145,7 @@
sb.append("\n Server certificate Subject Name is "
+ cert.getSubjectX500Principal().getName());
} else {
- sb.append("\n No Server certs was found");
+ sb.append("\n No server certificates were found");
}
LOG.debug(sb.toString());
}
@@ -200,17 +201,48 @@
}
});
}
+
+  static void checkServerCertsExpirationDays(Configuration conf, String hostname, int port) throws IOException {
+ setupSslProps(conf);
+ HttpsURLConnection connection = null;
+ connection = openConnection(hostname, port, null);
+ connection.connect();
+    X509Certificate[] serverCerts = (X509Certificate[]) connection.getServerCertificates();
+ Date curDate = new Date();
+ long curTime = curDate.getTime();
+ if (serverCerts != null) {
+ for (X509Certificate cert : serverCerts) {
+ StringBuffer sb = new StringBuffer();
+ sb.append("\n Server certificate Subject Name: " +
cert.getSubjectX500Principal().getName());
+ Date expDate = cert.getNotAfter();
+ long expTime = expDate.getTime();
+ int dayOffSet = (int) ((expTime - curTime)/MM_SECONDS_PER_DAY);
+ sb.append(" have " + dayOffSet + " days to expire");
+ if (dayOffSet < CERT_EXPIRATION_WARNING_THRESHOLD)
LOG.warn(sb.toString());
+ else LOG.info(sb.toString());
+ }
+ } else {
+      LOG.info("\n No server certificates were found");
+ }
+
+ if (connection != null) {
+ connection.disconnect();
+ }
+ }
public static void main(String[] args) throws Exception {
if(args.length < 1 ||
(!UtilityOption.RELOAD.getName().equalsIgnoreCase(args[0])
&& !UtilityOption.CLEAR.getName().equalsIgnoreCase(args[0])
- && !UtilityOption.GET.getName().equalsIgnoreCase(args[0])) ||
-        (UtilityOption.GET.getName().equalsIgnoreCase(args[0]) && args.length != 4)) {
+          && !UtilityOption.GET.getName().equalsIgnoreCase(args[0])
+          && !UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0])) ||
+        (UtilityOption.GET.getName().equalsIgnoreCase(args[0]) && args.length != 4) ||
+        (UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0]) && args.length != 3)) {
System.err.println("Usage: ProxyUtil ["
+ UtilityOption.RELOAD.getName() + "] | ["
+ UtilityOption.CLEAR.getName() + "] | ["
- + UtilityOption.GET.getName() + " <hostname> <#port> <path> ]");
+ + UtilityOption.GET.getName() + " <hostname> <#port> <path> ] | ["
+ + UtilityOption.CHECKCERTS.getName() + " <hostname> <#port> ]");
System.exit(0);
}
Configuration conf = new Configuration(false);
@@ -223,6 +255,8 @@
} else if (UtilityOption.CLEAR.getName().equalsIgnoreCase(args[0])) {
// clear UGI caches
sendCommand(conf, "/clearUgiCache");
+ } else if (UtilityOption.CHECKCERTS.getName().equalsIgnoreCase(args[0])) {
+ checkServerCertsExpirationDays(conf, args[1], Integer.parseInt(args[2]));
} else {
String hostname = args[1];
int port = Integer.parseInt(args[2]);
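For context, the day arithmetic in checkServerCertsExpirationDays above reduces to comparing a certificate's notAfter timestamp against the current clock. A minimal standalone sketch of the same calculation, assuming nothing beyond the JDK (the class and method names are illustrative, not part of this patch):

    import java.security.cert.X509Certificate;
    import java.util.concurrent.TimeUnit;

    class CertExpirySketch {
      /** Whole days until the certificate expires; negative once expired. */
      static long daysUntilExpiration(X509Certificate cert) {
        long remainingMillis =
            cert.getNotAfter().getTime() - System.currentTimeMillis();
        return TimeUnit.MILLISECONDS.toDays(remainingMillis);
      }
    }

On the command line the new check is reached through the option added to main, i.e. ProxyUtil -checkcerts <hostname> <#port>; note that checkServerCertsExpirationDays calls setupSslProps(conf) before opening the connection, so the SSL client configuration must be present in the supplied Configuration.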
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=771456&r1=771455&r2=771456&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java Mon May 4 21:38:48 2009
@@ -116,7 +116,10 @@
if (LOG.isTraceEnabled()) {
LOG.trace("url=" + url);
}
- return (HttpURLConnection)url.openConnection();
+ HttpURLConnection connection = (HttpURLConnection)url.openConnection();
+ connection.setRequestMethod("GET");
+ connection.connect();
+ return connection;
} catch (URISyntaxException e) {
throw (IOException)new IOException().initCause(e);
}
@@ -126,8 +129,6 @@
public FSDataInputStream open(Path f, int buffersize) throws IOException {
HttpURLConnection connection = null;
connection = openConnection("/data" + f.toUri().getPath(), "ugi=" + ugi);
- connection.setRequestMethod("GET");
- connection.connect();
final InputStream in = connection.getInputStream();
return new FSDataInputStream(new FSInputStream() {
public int read() throws IOException {
@@ -199,8 +200,6 @@
xr.setContentHandler(this);
HttpURLConnection connection = openConnection("/listPaths" + path,
"ugi=" + ugi + (recur? "&recursive=yes" : ""));
- connection.setRequestMethod("GET");
- connection.connect();
InputStream resp = connection.getInputStream();
xr.parse(new InputSource(resp));
@@ -268,10 +267,6 @@
try {
final XMLReader xr = XMLReaderFactory.createXMLReader();
xr.setContentHandler(this);
-
- connection.setRequestMethod("GET");
- connection.connect();
-
xr.parse(new InputSource(connection.getInputStream()));
} catch(SAXException e) {
final Exception embedded = e.getException();
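The relocation of setRequestMethod("GET") and connect() into openConnection also lines up with the HsftpFileSystem change below: an HttpsURLConnection only exposes negotiated certificates once the TLS handshake has run, which happens during connect(). A small sketch of that ordering, with a placeholder URL (not code from this patch):

    import java.io.IOException;
    import java.net.URL;
    import java.security.cert.Certificate;
    import javax.net.ssl.HttpsURLConnection;

    class HandshakeOrderSketch {
      static Certificate[] peerCerts() throws IOException {
        HttpsURLConnection conn = (HttpsURLConnection)
            new URL("https://example.com/").openConnection();
        conn.setRequestMethod("GET");
        conn.connect();                       // TLS handshake runs here
        return conn.getServerCertificates();  // valid only after connect()
      }
    }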
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java?rev=771456&r1=771455&r2=771456&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/HsftpFileSystem.java Mon May 4 21:38:48 2009
@@ -23,13 +23,16 @@
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
-import java.net.UnknownHostException;
+import java.security.cert.X509Certificate;
+
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLSession;
import org.apache.hadoop.conf.Configuration;
+
+
/** An implementation of a protocol for accessing filesystems over HTTPS.
* The following implementation provides a limited, read-only interface
* to a filesystem over HTTPS.
@@ -38,10 +41,15 @@
*/
public class HsftpFileSystem extends HftpFileSystem {
+ private static final long MM_SECONDS_PER_DAY = 1000 * 60 * 60 * 24;
+ private volatile int ExpWarnDays = 0;
+
+
@Override
public void initialize(URI name, Configuration conf) throws IOException {
super.initialize(name, conf);
setupSsl(conf);
+ ExpWarnDays = conf.getInt("ssl.expiration.warn.days", 30);
}
/** Set up SSL resources */
@@ -64,7 +72,7 @@
System.setProperty("javax.net.ssl.keyStoreType", sslConf.get(
"ssl.client.keystore.type", "jks"));
}
-
+
@Override
protected HttpURLConnection openConnection(String path, String query)
throws IOException {
@@ -74,6 +82,31 @@
HttpsURLConnection conn = (HttpsURLConnection)url.openConnection();
// bypass hostname verification
conn.setHostnameVerifier(new DummyHostnameVerifier());
+ conn.setRequestMethod("GET");
+ conn.connect();
+
+ // check cert expiration date
+ final int warnDays = ExpWarnDays;
+ if (warnDays > 0) { // make sure only check once
+ ExpWarnDays = 0;
+ long expTimeThreshold = warnDays * MM_SECONDS_PER_DAY
+ + System.currentTimeMillis();
+ X509Certificate[] clientCerts = (X509Certificate[]) conn
+ .getLocalCertificates();
+ if (clientCerts != null) {
+ for (X509Certificate cert : clientCerts) {
+ long expTime = cert.getNotAfter().getTime();
+ if (expTime < expTimeThreshold) {
+ StringBuffer sb = new StringBuffer();
+ sb.append("\n Client certificate "
+ + cert.getSubjectX500Principal().getName());
+            int dayOffSet = (int) ((expTime - System.currentTimeMillis())/MM_SECONDS_PER_DAY);
+            sb.append(" has " + dayOffSet + " days to expire");
+ LOG.warn(sb.toString());
+ }
+ }
+ }
+ }
return (HttpURLConnection)conn;
} catch (URISyntaxException e) {
throw (IOException)new IOException().initCause(e);
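The warning window above is read from ssl.expiration.warn.days (default 30 days). Because the check is guarded by warnDays > 0 and ExpWarnDays is reset to 0 afterwards, it runs at most once per filesystem instance, and setting the key to 0 disables it. A sketch of narrowing the window, with an example value:

    // Hypothetical client-side configuration tweak: warn only when a
    // certificate is within 7 days of expiry (0 disables the check).
    Configuration conf = new Configuration();
    conf.setInt("ssl.expiration.warn.days", 7);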