hadoop git commit: HDDS-155: Implement KeyValueContainer and adopt new disk layout for the containers. Contributed by Bharat Viswanadham

2018-06-14 Thread bharat
Repository: hadoop
Updated Branches:
  refs/heads/HDDS-48 9a5552bf7 -> 998e2850a


HDDS-155: Implement KeyValueContainer and adopt new disk layout for the containers. Contributed by Bharat Viswanadham


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/998e2850
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/998e2850
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/998e2850

Branch: refs/heads/HDDS-48
Commit: 998e2850a3ceb961d66b9d3398a1afaad63a5cd8
Parents: 9a5552b
Author: Bharat Viswanadham 
Authored: Thu Jun 14 20:54:54 2018 -0700
Committer: Bharat Viswanadham 
Committed: Thu Jun 14 20:54:54 2018 -0700

--
 .../org/apache/hadoop/ozone/OzoneConsts.java|   3 +
 .../org/apache/hadoop/ozone/common/Storage.java |   3 +-
 .../main/proto/DatanodeContainerProtocol.proto  |   5 +
 .../common/impl/KeyValueContainer.java  |  78 ---
 .../common/impl/KeyValueContainerData.java  |  21 +
 .../container/common/interfaces/Container.java  |  17 +-
 .../container/keyvalue/KeyValueContainer.java   | 544 +++
 .../keyvalue/KeyValueContainerLocationUtil.java | 140 +
 .../keyvalue/KeyValueContainerUtil.java | 148 +
 .../container/keyvalue/helpers/KeyUtils.java|  82 +++
 .../keyvalue/helpers/package-info.java  |  21 +
 .../ozone/container/keyvalue/package-info.java  |  21 +
 .../container/common/impl/TestContainerSet.java |   8 +-
 .../keyvalue/TestKeyValueContainer.java | 281 ++
 14 files changed, 1285 insertions(+), 87 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/998e2850/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
--
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
index 36f830b..3b774a5 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
@@ -101,6 +101,9 @@ public final class OzoneConsts {
   public static final String DELETED_BLOCK_DB = "deletedBlock.db";
   public static final String KSM_DB_NAME = "ksm.db";
 
+  public static final String STORAGE_DIR_CHUNKS = "chunks";
+  public static final String CONTAINER_FILE_CHECKSUM_EXTENSION = ".chksm";
+
   /**
* Supports Bucket Versioning.
*/

http://git-wip-us.apache.org/repos/asf/hadoop/blob/998e2850/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java
--
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java
index fb30d92..2ff4626 100644
--- 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java
+++ 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Storage.java
@@ -45,8 +45,9 @@ import java.util.Properties;
 public abstract class Storage {
   private static final Logger LOG = LoggerFactory.getLogger(Storage.class);
 
-  protected static final String STORAGE_DIR_CURRENT = "current";
+  public static final String STORAGE_DIR_CURRENT = "current";
   protected static final String STORAGE_FILE_VERSION = "VERSION";
+  public static final String CONTAINER_DIR = "containerdir";
 
   private final NodeType nodeType;
   private final File root;
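
To make the new layout easier to picture, here is a small standalone sketch of how these constants could fit together on disk. It is not code from the patch: the volume root, the container-directory nesting and the checksum file name are assumptions, and the authoritative logic lives in the new KeyValueContainerLocationUtil.

    // Illustrative sketch only; names marked below are assumptions.
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class LayoutSketch {
      public static void main(String[] args) {
        long containerId = 12L;
        Path volumeRoot = Paths.get("/data/disk1/hdds");      // assumed volume root
        Path containerBase = volumeRoot
            .resolve("current")                               // Storage.STORAGE_DIR_CURRENT
            .resolve("containerdir")                          // Storage.CONTAINER_DIR
            .resolve(Long.toString(containerId));             // assumed per-container dir
        Path chunksDir = containerBase.resolve("chunks");     // OzoneConsts.STORAGE_DIR_CHUNKS
        Path checksumFile = containerBase
            .resolve(containerId + ".container.chksm");       // assumed name, CONTAINER_FILE_CHECKSUM_EXTENSION
        System.out.println(chunksDir);
        System.out.println(checksumFile);
      }
    }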

http://git-wip-us.apache.org/repos/asf/hadoop/blob/998e2850/hadoop-hdds/common/src/main/proto/DatanodeContainerProtocol.proto
--
diff --git a/hadoop-hdds/common/src/main/proto/DatanodeContainerProtocol.proto 
b/hadoop-hdds/common/src/main/proto/DatanodeContainerProtocol.proto
index 72e1006..88645be 100644
--- a/hadoop-hdds/common/src/main/proto/DatanodeContainerProtocol.proto
+++ b/hadoop-hdds/common/src/main/proto/DatanodeContainerProtocol.proto
@@ -132,6 +132,11 @@ enum Result {
   DELETE_ON_OPEN_CONTAINER = 26;
   CLOSED_CONTAINER_RETRY = 27;
   INVALID_CONTAINER_STATE = 28;
+  DISK_OUT_OF_SPACE = 29;
+  CONTAINER_ALREADY_EXISTS = 30;
+  CONTAINER_METADATA_ERROR = 31;
+  CONTAINER_FILES_CREATE_ERROR = 32;
+  CONTAINER_CHECKSUM_ERROR = 33;
 }
 
 /**
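The new result codes give the datanode a way to report layout and disk problems back to clients. As a minimal illustration (not from the patch, and the mapping into the protobuf response is omitted), a pre-flight space check of the kind that could end in DISK_OUT_OF_SPACE:

    // Hypothetical helper; the real mapping to a ContainerCommandResponseProto
    // result happens inside the container layer, not shown here.
    import java.io.File;

    public class VolumeSpaceCheck {
      static boolean hasSpaceFor(File volumeRoot, long requestedBytes) {
        // getUsableSpace() reports the bytes available to this JVM on that volume.
        return volumeRoot.getUsableSpace() >= requestedBytes;
      }

      public static void main(String[] args) {
        long fiveGb = 5L * 1024 * 1024 * 1024;
        if (!hasSpaceFor(new File("/tmp"), fiveGb)) {
          // A datanode would answer the create request with Result.DISK_OUT_OF_SPACE.
          System.out.println("DISK_OUT_OF_SPACE");
        }
      }
    }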

http://git-wip-us.apache.org/repos/asf/hadoop/blob/998e2850/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/KeyValueContainer.java
--
diff --git 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/KeyValueContainer.java
 

[15/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.3.1.min.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.3.1.min.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.3.1.min.js
new file mode 100644
index 000..4d9b3a2
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.3.1.min.js
@@ -0,0 +1,2 @@
+/*! jQuery v3.3.1 | (c) JS Foundation and other contributors | jquery.org/license */
+[minified jQuery 3.3.1 source omitted; the remainder of this hunk was mangled and truncated in the mail archive]

[12/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/jquery.js
--
diff --git a/hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/jquery.js 
b/hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/jquery.js
index 38f046c..9b5206b 100644
--- a/hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/jquery.js
+++ b/hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/jquery.js
@@ -1,247 +1,173 @@
 /*!
- * jQuery JavaScript Library v1.10.2
- * http://jquery.com/
+ * jQuery JavaScript Library v3.3.1
+ * https://jquery.com/
  *
  * Includes Sizzle.js
- * http://sizzlejs.com/
+ * https://sizzlejs.com/
  *
- * Copyright 2005, 2013 jQuery Foundation, Inc. and other contributors
+ * Copyright JS Foundation and other contributors
  * Released under the MIT license
- * http://jquery.org/license
+ * https://jquery.org/license
  *
- * Date: 2013-07-03T13:48Z
+ * Date: 2018-01-20T17:24Z
  */
-(function( window, undefined ) {
-
-// Can't do this because several apps including ASP.NET trace
-// the stack via arguments.caller.callee and Firefox dies if
-// you try to trace through "use strict" call chains. (#13335)
-// Support: Firefox 18+
-//"use strict";
-var
-   // The deferred used on DOM ready
-   readyList,
-
-   // A central reference to the root jQuery(document)
-   rootjQuery,
-
-   // Support: IE<10
-   // For `typeof xmlNode.method` instead of `xmlNode.method !== undefined`
-   core_strundefined = typeof undefined,
+( function( global, factory ) {
+
+   "use strict";
+
+   if ( typeof module === "object" && typeof module.exports === "object" ) 
{
+
+   // For CommonJS and CommonJS-like environments where a proper 
`window`
+   // is present, execute the factory and get jQuery.
+   // For environments that do not have a `window` with a 
`document`
+   // (such as Node.js), expose a factory as module.exports.
+   // This accentuates the need for the creation of a real 
`window`.
+   // e.g. var jQuery = require("jquery")(window);
+   // See ticket #14549 for more info.
+   module.exports = global.document ?
+   factory( global, true ) :
+   function( w ) {
+   if ( !w.document ) {
+   throw new Error( "jQuery requires a 
window with a document" );
+   }
+   return factory( w );
+   };
+   } else {
+   factory( global );
+   }
 
-   // Use the correct document accordingly with window argument (sandbox)
-   location = window.location,
-   document = window.document,
-   docElem = document.documentElement,
+// Pass this if window is not defined yet
+} )( typeof window !== "undefined" ? window : this, function( window, noGlobal 
) {
 
-   // Map over jQuery in case of overwrite
-   _jQuery = window.jQuery,
+// Edge <= 12 - 13+, Firefox <=18 - 45+, IE 10 - 11, Safari 5.1 - 9+, iOS 6 - 
9.1
+// throw exceptions when non-strict code (e.g., ASP.NET 4.5) accesses strict 
mode
+// arguments.callee.caller (trac-13335). But as of jQuery 3.0 (2016), strict 
mode should be common
+// enough that all such attempts are guarded in a try block.
+"use strict";
 
-   // Map over the $ in case of overwrite
-   _$ = window.$,
+var arr = [];
 
-   // [[Class]] -> type pairs
-   class2type = {},
+var document = window.document;
 
-   // List of deleted data cache ids, so we can reuse them
-   core_deletedIds = [],
+var getProto = Object.getPrototypeOf;
 
-   core_version = "1.10.2",
+var slice = arr.slice;
 
-   // Save a reference to some core methods
-   core_concat = core_deletedIds.concat,
-   core_push = core_deletedIds.push,
-   core_slice = core_deletedIds.slice,
-   core_indexOf = core_deletedIds.indexOf,
-   core_toString = class2type.toString,
-   core_hasOwn = class2type.hasOwnProperty,
-   core_trim = core_version.trim,
+var concat = arr.concat;
 
-   // Define a local copy of jQuery
-   jQuery = function( selector, context ) {
-   // The jQuery object is actually just the init constructor 
'enhanced'
-   return new jQuery.fn.init( selector, context, rootjQuery );
-   },
+var push = arr.push;
 
-   // Used for matching numbers
-   core_pnum = /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,
+var indexOf = arr.indexOf;
 
-   // Used for splitting on whitespace
-   core_rnotwhite = /\S+/g,
+var class2type = {};
 
-   // Make sure we trim BOM and NBSP (here's looking at you, Safari 5.0 
and IE)
-   rtrim = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,
+var toString = class2type.toString;
 
-   // A simple way to check for HTML strings
-   // 

[03/50] [abbrv] hadoop git commit: MAPREDUCE-7108. TestFileOutputCommitter fails on Windows. (Zuoming Zhang via gera)

2018-06-14 Thread xkrogen
MAPREDUCE-7108. TestFileOutputCommitter fails on Windows. (Zuoming Zhang via 
gera)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/04b74edd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/04b74edd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/04b74edd

Branch: refs/heads/HDFS-12943
Commit: 04b74eddc67888142879ed114d21348e8a4aad78
Parents: a16623d
Author: Gera Shegalov 
Authored: Tue Jun 12 11:21:51 2018 -0700
Committer: Gera Shegalov 
Committed: Tue Jun 12 11:21:51 2018 -0700

--
 .../lib/output/TestFileOutputCommitter.java | 16 
 1 file changed, 8 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/04b74edd/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
index cd9d44b..fc43dce 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RawLocalFileSystem;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.MapFile;
 import org.apache.hadoop.io.NullWritable;
@@ -526,16 +527,15 @@ public class TestFileOutputCommitter {
 
 // Ensure getReaders call works and also ignores
 // hidden filenames (_ or . prefixes)
+MapFile.Reader[] readers = {};
 try {
-  MapFileOutputFormat.getReaders(outDir, conf);
-} catch (Exception e) {
-  fail("Fail to read from MapFileOutputFormat: " + e);
-  e.printStackTrace();
+  readers = MapFileOutputFormat.getReaders(outDir, conf);
+  // validate output
+  validateMapFileOutputContent(FileSystem.get(job.getConfiguration()), 
outDir);
+} finally {
+  IOUtils.cleanupWithLogger(null, readers);
+  FileUtil.fullyDelete(new File(outDir.toString()));
 }
-
-// validate output
-validateMapFileOutputContent(FileSystem.get(job.getConfiguration()), 
outDir);
-FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
   @Test
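
The shape of the fix, restated as a standalone sketch with the test's real fixtures left out: close every reader in a finally block before deleting the output directory, because Windows will not delete files that still have open handles.

    import java.io.Closeable;
    import java.io.File;
    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.io.IOUtils;

    public class CloseBeforeDelete {
      // readers stands in for the MapFile.Reader[] returned by getReaders().
      public static void validateThenClean(File outDir, Closeable... readers) {
        try {
          // ... read and validate the committed output here ...
        } finally {
          IOUtils.cleanupWithLogger(null, readers); // close quietly; logs only if a logger is passed
          FileUtil.fullyDelete(outDir);             // safe now that all handles are closed
        }
      }
    }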





[02/50] [abbrv] hadoop git commit: HADOOP-15307. NFS: flavor AUTH_SYS should use VerifierNone. Contributed by Gabor Bota.

2018-06-14 Thread xkrogen
HADOOP-15307. NFS: flavor AUTH_SYS should use VerifierNone. Contributed by 
Gabor Bota.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a16623df
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a16623df
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a16623df

Branch: refs/heads/HDFS-12943
Commit: a16623df2148e59e1b7dcf98939b03cb7440d143
Parents: c354815
Author: Xiao Chen 
Authored: Tue Jun 12 11:14:33 2018 -0700
Committer: Xiao Chen 
Committed: Tue Jun 12 11:16:24 2018 -0700

--
 .../org/apache/hadoop/oncrpc/security/Verifier.java | 12 +---
 1 file changed, 9 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a16623df/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java
--
diff --git 
a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java
 
b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java
index f3202a1..3c0e5fe 100644
--- 
a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java
+++ 
b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java
@@ -41,12 +41,18 @@ public abstract class Verifier extends RpcAuthInfo {
   public static Verifier readFlavorAndVerifier(XDR xdr) {
 AuthFlavor flavor = AuthFlavor.fromValue(xdr.readInt());
 final Verifier verifer;
-if(flavor == AuthFlavor.AUTH_NONE) {
+if (flavor == AuthFlavor.AUTH_NONE) {
   verifer = new VerifierNone();
-} else if(flavor == AuthFlavor.RPCSEC_GSS) {
+} else if (flavor == AuthFlavor.AUTH_SYS) {
+  // Added in HADOOP-15307 based on HDFS-5085:
+  // When the auth flavor is AUTH_SYS, the corresponding verifier is
+  // AUTH_NONE. I.e., it is impossible to have a verifier with auth
+  // flavor AUTH_SYS.
+  verifer = new VerifierNone();
+} else if (flavor == AuthFlavor.RPCSEC_GSS) {
   verifer = new VerifierGSS();
 } else {
-  throw new UnsupportedOperationException("Unsupported verifier flavor"
+  throw new UnsupportedOperationException("Unsupported verifier flavor: "
   + flavor);
 }
 verifer.read(xdr);
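
Restating the mapping the patch establishes as a tiny self-contained example (these are not the hadoop-nfs classes, just an illustration): AUTH_SYS requests carry a "none" verifier exactly like AUTH_NONE, and only RPCSEC_GSS gets a GSS verifier.

    public class VerifierChoice {
      enum Flavor { AUTH_NONE, AUTH_SYS, RPCSEC_GSS }

      static String verifierFor(Flavor flavor) {
        switch (flavor) {
          case AUTH_NONE:
          case AUTH_SYS:   // per HADOOP-15307 / HDFS-5085: AUTH_SYS has no verifier of its own
            return "VerifierNone";
          case RPCSEC_GSS:
            return "VerifierGSS";
          default:
            throw new UnsupportedOperationException("Unsupported verifier flavor: " + flavor);
        }
      }
    }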





[07/50] [abbrv] hadoop git commit: YARN-8394. Improve data locality documentation for Capacity Scheduler. Contributed by Weiwei Yang.

2018-06-14 Thread xkrogen
YARN-8394. Improve data locality documentation for Capacity Scheduler. 
Contributed by Weiwei Yang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/29024a62
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/29024a62
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/29024a62

Branch: refs/heads/HDFS-12943
Commit: 29024a62038c297f11e8992601f2522c7da7
Parents: 108da85
Author: Weiwei Yang 
Authored: Wed Jun 13 09:28:05 2018 +0800
Committer: Weiwei Yang 
Committed: Wed Jun 13 09:28:05 2018 +0800

--
 .../conf/capacity-scheduler.xml | 2 ++
 .../hadoop-yarn-site/src/site/markdown/CapacityScheduler.md | 5 +
 2 files changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/29024a62/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/conf/capacity-scheduler.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/conf/capacity-scheduler.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/conf/capacity-scheduler.xml
index aca6c7c..62654ca 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/conf/capacity-scheduler.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/conf/capacity-scheduler.xml
@@ -149,6 +149,8 @@
   attempts to schedule rack-local containers.
   When setting this parameter, the size of the cluster should be taken 
into account.
   We use 40 as the default value, which is approximately the number of 
nodes in one rack.
+  Note, if this value is -1, the locality constraint in the container 
request
+  will be ignored, which disables the delay scheduling.
 
   
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/29024a62/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/CapacityScheduler.md
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/CapacityScheduler.md
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/CapacityScheduler.md
index ef6381a..5be32d4 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/CapacityScheduler.md
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/CapacityScheduler.md
@@ -400,9 +400,14 @@ list of current scheduling edit policies as a comma 
separated string in `yarn.re
 
   * Data Locality
 
+Capacity Scheduler leverages `Delay Scheduling` to honor task locality 
constraints. There are 3 levels of locality constraint: node-local, rack-local 
and off-switch. The scheduler counts the number of missed opportunities when 
the locality cannot be satisfied, and waits this count to reach a threshold 
before relaxing the locality constraint to next level. The threshold can be 
configured in following properties:
+
 | Property | Description |
 |: |: |
 | `yarn.scheduler.capacity.node-locality-delay` | Number of missed scheduling 
opportunities after which the CapacityScheduler attempts to schedule rack-local 
containers. Typically, this should be set to number of nodes in the cluster. By 
default is setting approximately number of nodes in one rack which is 40. 
Positive integer value is expected. |
+| `yarn.scheduler.capacity.rack-locality-additional-delay` |  Number of 
additional missed scheduling opportunities over the node-locality-delay ones, 
after which the CapacityScheduler attempts to schedule off-switch containers. 
By default this value is set to -1, in this case, the number of missed 
opportunities for assigning off-switch containers is calculated based on the 
formula `L * C / N`, where `L` is number of locations (nodes or racks) 
specified in the resource request, `C` is the number of requested containers, 
and `N` is the size of the cluster. |
+
+Note, this feature should be disabled if YARN is deployed separately with the 
file system, as locality is meaningless. This can be done by setting 
`yarn.scheduler.capacity.node-locality-delay` to `-1`, in this case, request's 
locality constraint is ignored.
 
   * Container Allocation per NodeManager Heartbeat
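
As a worked example of the default off-switch formula above (numbers purely illustrative): a request for C = 20 containers spread over L = 4 racks on an N = 80-node cluster tolerates L * C / N = 4 * 20 / 80 = 1 additional missed scheduling opportunity beyond node-locality-delay before off-switch containers are assigned. And, as the new note in capacity-scheduler.xml states, setting yarn.scheduler.capacity.node-locality-delay to -1 bypasses delay scheduling entirely by ignoring the request's locality constraint.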
 





[48/50] [abbrv] hadoop git commit: HDDS-146. Refactor the structure of the acceptance tests. Contributed by Elek, Marton.

2018-06-14 Thread xkrogen
HDDS-146. Refactor the structure of the acceptance tests. Contributed by Elek, Marton.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/020dd619
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/020dd619
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/020dd619

Branch: refs/heads/HDFS-12943
Commit: 020dd61988b1d47971e328174135d54baf5d41aa
Parents: 5d7449d
Author: Anu Engineer 
Authored: Thu Jun 14 14:14:24 2018 -0700
Committer: Anu Engineer 
Committed: Thu Jun 14 14:14:24 2018 -0700

--
 .../dev-support/bin/robot-all.sh|   2 +-
 .../dev-support/bin/robot-dnd-all.sh|  63 +
 .../acceptance-test/dev-support/bin/robot.sh|   9 +-
 .../dev-support/docker/Dockerfile   |  21 ++
 .../dev-support/docker/docker-compose.yaml  |  23 ++
 hadoop-ozone/acceptance-test/pom.xml|   1 +
 .../src/test/acceptance/basic/.env  |  17 ++
 .../src/test/acceptance/basic/basic.robot   |  50 
 .../test/acceptance/basic/docker-compose.yaml   |  62 +
 .../src/test/acceptance/basic/docker-config |  38 +++
 .../src/test/acceptance/basic/ozone-shell.robot |  85 ++
 .../src/test/acceptance/commonlib.robot |  79 ++
 .../src/test/acceptance/ozonefs/.env|  17 ++
 .../acceptance-test/src/test/compose/.env   |  17 --
 .../src/test/compose/docker-compose.yaml|  62 -
 .../src/test/compose/docker-config  |  36 ---
 .../robotframework/acceptance/ozone-shell.robot | 256 ---
 .../test/robotframework/acceptance/ozone.robot  | 104 
 start-build-env.sh  |   8 +-
 19 files changed, 467 insertions(+), 483 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/020dd619/hadoop-ozone/acceptance-test/dev-support/bin/robot-all.sh
--
diff --git a/hadoop-ozone/acceptance-test/dev-support/bin/robot-all.sh 
b/hadoop-ozone/acceptance-test/dev-support/bin/robot-all.sh
index 0e212a2..ee9c6b8 100755
--- a/hadoop-ozone/acceptance-test/dev-support/bin/robot-all.sh
+++ b/hadoop-ozone/acceptance-test/dev-support/bin/robot-all.sh
@@ -15,4 +15,4 @@
 # limitations under the License.
 
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-$DIR/robot.sh $DIR/../../src/test/robotframework/acceptance
\ No newline at end of file
+$DIR/robot.sh $DIR/../../src/test/acceptance

http://git-wip-us.apache.org/repos/asf/hadoop/blob/020dd619/hadoop-ozone/acceptance-test/dev-support/bin/robot-dnd-all.sh
--
diff --git a/hadoop-ozone/acceptance-test/dev-support/bin/robot-dnd-all.sh 
b/hadoop-ozone/acceptance-test/dev-support/bin/robot-dnd-all.sh
new file mode 100755
index 000..9f1d367
--- /dev/null
+++ b/hadoop-ozone/acceptance-test/dev-support/bin/robot-dnd-all.sh
@@ -0,0 +1,63 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -x
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+#Dir od the definition of the dind based test exeucution container
+DOCKERDIR="$DIR/../docker"
+
+#Dir to save the results
+TARGETDIR="$DIR/../../target/dnd"
+
+#Dir to mount the distribution from
+OZONEDIST="$DIR/../../../../hadoop-dist/target/ozone"
+
+#Name and imagename of the temporary, dind based test containers
+DOCKER_IMAGE_NAME=ozoneacceptance
+DOCKER_INSTANCE_NAME="${DOCKER_INSTANCE_NAME:-ozoneacceptance}"
+
+teardown() {
+   docker stop "$DOCKER_INSTANCE_NAME"
+}
+
+trap teardown EXIT
+
+#Make sure it will work even if the ozone is built by an other user. We 
+# eneable to run the distribution by an other user
+mkdir -p "$TARGETDIR"
+mkdir -p "$OZONEDIST/logs"
+chmod o+w "$OZONEDIST/logs" || true
+chmod -R o+w "$OZONEDIST/etc/hadoop" || true
+chmod o+w "$OZONEDIST" || true
+
+rm "$TARGETDIR/docker-compose.log"
+docker rm "$DOCKER_INSTANCE_NAME" || true
+docker build -t "$DOCKER_IMAGE_NAME" $DIR/../docker
+
+#Starting the dind based environment
+docker run 

[49/50] [abbrv] hadoop git commit: Merge branch 'trunk' into HDFS-12943

2018-06-14 Thread xkrogen
Merge branch 'trunk' into HDFS-12943


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d044cae8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d044cae8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d044cae8

Branch: refs/heads/HDFS-12943
Commit: d044cae87e770624e66146f54483c03c8f35ad02
Parents: 9756c2c 020dd61
Author: Erik Krogen 
Authored: Thu Jun 14 17:36:59 2018 -0700
Committer: Erik Krogen 
Committed: Thu Jun 14 17:36:59 2018 -0700

--
 LICENSE.txt | 4 +-
 .../src/main/bin/hadoop-functions.sh|34 +-
 .../hadoop/util/curator/ZKCuratorManager.java   |14 +-
 .../hadoop-common/src/site/markdown/Metrics.md  |12 +
 .../markdown/release/3.0.3/CHANGES.3.0.3.md |   309 +
 .../release/3.0.3/RELEASENOTES.3.0.3.md |31 +
 .../org/apache/hadoop/test/MetricsAsserts.java  |22 +-
 .../org/apache/hadoop/util/TestDiskChecker.java | 6 +-
 .../src/test/scripts/hadoop_stop_daemon.bats|24 +-
 .../apache/hadoop/oncrpc/security/Verifier.java |12 +-
 .../hadoop/hdds/scm/XceiverClientGrpc.java  |23 +
 .../container/common/helpers/ContainerInfo.java |27 +-
 .../org/apache/hadoop/ozone/OzoneConsts.java| 2 +
 .../apache/hadoop/utils/MetadataKeyFilters.java |   118 +-
 hadoop-hdds/common/src/main/proto/hdds.proto| 1 +
 .../apache/hadoop/ozone/TestMetadataStore.java  |61 +-
 .../container/common/helpers/ContainerData.java |21 +
 .../common/helpers/ContainerReport.java |12 +
 .../common/impl/ContainerManagerImpl.java   |15 +-
 .../container/common/report/ReportManager.java  | 3 +-
 .../statemachine/DatanodeStateMachine.java  | 9 +
 .../background/BlockDeletingService.java|16 +-
 .../CloseContainerCommandHandler.java   |21 +-
 .../commandhandler/CommandDispatcher.java   | 4 +
 .../DeleteBlocksCommandHandler.java | 3 +
 .../states/endpoint/HeartbeatEndpointTask.java  |12 +-
 .../common/transport/server/XceiverServer.java  | 7 +
 .../transport/server/XceiverServerGrpc.java | 9 +
 .../transport/server/XceiverServerSpi.java  | 7 +
 .../server/ratis/XceiverServerRatis.java|56 +-
 .../container/ozoneimpl/OzoneContainer.java |62 +-
 .../commands/CloseContainerCommand.java |12 +-
 .../StorageContainerDatanodeProtocol.proto  | 2 +
 .../common/report/TestReportPublisher.java  |79 +
 .../hadoop/hdds/scm/block/BlockManagerImpl.java | 9 +-
 .../hadoop/hdds/scm/block/DeletedBlockLog.java  | 3 +-
 .../hdds/scm/block/DeletedBlockLogImpl.java | 8 +-
 .../container/CloseContainerEventHandler.java   | 3 +-
 .../hdds/scm/container/ContainerMapping.java|35 +
 .../scm/container/ContainerStateManager.java|12 +
 .../hadoop/hdds/scm/container/Mapping.java  |11 +
 .../scm/container/closer/ContainerCloser.java   | 7 +-
 .../scm/server/SCMDatanodeProtocolServer.java   |   119 +-
 .../SCMDatanodeContainerReportHandler.java  |76 +
 .../report/SCMDatanodeHeartbeatDispatcher.java  |   189 +
 .../report/SCMDatanodeNodeReportHandler.java|43 +
 .../server/report/SCMDatanodeReportHandler.java |83 +
 .../report/SCMDatanodeReportHandlerFactory.java |82 +
 .../hdds/scm/server/report/package-info.java|57 +
 .../server-scm/src/main/webapps/scm/index.html  | 6 +-
 .../hadoop/hdds/scm/block/TestBlockManager.java |16 +
 .../scm/container/TestContainerMapping.java | 6 +-
 .../container/closer/TestContainerCloser.java   | 3 +-
 .../TestSCMDatanodeContainerReportHandler.java  |34 +
 .../TestSCMDatanodeHeartbeatDispatcher.java |   138 +
 .../TestSCMDatanodeNodeReportHandler.java   |36 +
 .../TestSCMDatanodeReportHandlerFactory.java|51 +
 .../hdds/scm/server/report/package-info.java|21 +
 .../ha/ConfiguredFailoverProxyProvider.java |29 +-
 .../ha/TestConfiguredFailoverProxyProvider.java |   264 +
 .../driver/impl/StateStoreZooKeeperImpl.java| 6 +-
 .../main/webapps/router/federationhealth.html   | 6 +-
 .../jdiff/Apache_Hadoop_HDFS_3.0.3.xml  |   322 +
 hadoop-hdfs-project/hadoop-hdfs/pom.xml | 4 +-
 .../hdfs/server/namenode/ha/EditLogTailer.java  |11 +
 .../namenode/metrics/NameNodeMetrics.java   |59 +-
 .../src/main/resources/hdfs-default.xml |12 +
 .../src/main/webapps/datanode/datanode.html | 6 +-
 .../src/main/webapps/hdfs/dfshealth.html| 6 +-
 .../src/main/webapps/hdfs/dfshealth.js  | 8 +-
 .../src/main/webapps/hdfs/explorer.html |10 +-
 .../src/main/webapps/hdfs/explorer.js   |34 +-
 .../src/main/webapps/journal/index.html | 6 +-
 .../src/main/webapps/secondary/status.html 

[25/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css
new file mode 100644
index 000..6167622
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css
@@ -0,0 +1,6757 @@
+/*!
+ * Bootstrap v3.3.7 (http://getbootstrap.com)
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ */
+/*! normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */
+html {
+  font-family: sans-serif;
+  -webkit-text-size-adjust: 100%;
+  -ms-text-size-adjust: 100%;
+}
+body {
+  margin: 0;
+}
+article,
+aside,
+details,
+figcaption,
+figure,
+footer,
+header,
+hgroup,
+main,
+menu,
+nav,
+section,
+summary {
+  display: block;
+}
+audio,
+canvas,
+progress,
+video {
+  display: inline-block;
+  vertical-align: baseline;
+}
+audio:not([controls]) {
+  display: none;
+  height: 0;
+}
+[hidden],
+template {
+  display: none;
+}
+a {
+  background-color: transparent;
+}
+a:active,
+a:hover {
+  outline: 0;
+}
+abbr[title] {
+  border-bottom: 1px dotted;
+}
+b,
+strong {
+  font-weight: bold;
+}
+dfn {
+  font-style: italic;
+}
+h1 {
+  margin: .67em 0;
+  font-size: 2em;
+}
+mark {
+  color: #000;
+  background: #ff0;
+}
+small {
+  font-size: 80%;
+}
+sub,
+sup {
+  position: relative;
+  font-size: 75%;
+  line-height: 0;
+  vertical-align: baseline;
+}
+sup {
+  top: -.5em;
+}
+sub {
+  bottom: -.25em;
+}
+img {
+  border: 0;
+}
+svg:not(:root) {
+  overflow: hidden;
+}
+figure {
+  margin: 1em 40px;
+}
+hr {
+  height: 0;
+  -webkit-box-sizing: content-box;
+ -moz-box-sizing: content-box;
+  box-sizing: content-box;
+}
+pre {
+  overflow: auto;
+}
+code,
+kbd,
+pre,
+samp {
+  font-family: monospace, monospace;
+  font-size: 1em;
+}
+button,
+input,
+optgroup,
+select,
+textarea {
+  margin: 0;
+  font: inherit;
+  color: inherit;
+}
+button {
+  overflow: visible;
+}
+button,
+select {
+  text-transform: none;
+}
+button,
+html input[type="button"],
+input[type="reset"],
+input[type="submit"] {
+  -webkit-appearance: button;
+  cursor: pointer;
+}
+button[disabled],
+html input[disabled] {
+  cursor: default;
+}
+button::-moz-focus-inner,
+input::-moz-focus-inner {
+  padding: 0;
+  border: 0;
+}
+input {
+  line-height: normal;
+}
+input[type="checkbox"],
+input[type="radio"] {
+  -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+  box-sizing: border-box;
+  padding: 0;
+}
+input[type="number"]::-webkit-inner-spin-button,
+input[type="number"]::-webkit-outer-spin-button {
+  height: auto;
+}
+input[type="search"] {
+  -webkit-box-sizing: content-box;
+ -moz-box-sizing: content-box;
+  box-sizing: content-box;
+  -webkit-appearance: textfield;
+}
+input[type="search"]::-webkit-search-cancel-button,
+input[type="search"]::-webkit-search-decoration {
+  -webkit-appearance: none;
+}
+fieldset {
+  padding: .35em .625em .75em;
+  margin: 0 2px;
+  border: 1px solid #c0c0c0;
+}
+legend {
+  padding: 0;
+  border: 0;
+}
+textarea {
+  overflow: auto;
+}
+optgroup {
+  font-weight: bold;
+}
+table {
+  border-spacing: 0;
+  border-collapse: collapse;
+}
+td,
+th {
+  padding: 0;
+}
+/*! Source: 
https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */
+@media print {
+  *,
+  *:before,
+  *:after {
+color: #000 !important;
+text-shadow: none !important;
+background: transparent !important;
+-webkit-box-shadow: none !important;
+box-shadow: none !important;
+  }
+  a,
+  a:visited {
+text-decoration: underline;
+  }
+  a[href]:after {
+content: " (" attr(href) ")";
+  }
+  abbr[title]:after {
+content: " (" attr(title) ")";
+  }
+  a[href^="#"]:after,
+  a[href^="javascript:"]:after {
+content: "";
+  }
+  pre,
+  blockquote {
+border: 1px solid #999;
+
+page-break-inside: avoid;
+  }
+  thead {
+display: table-header-group;
+  }
+  tr,
+  img {
+page-break-inside: avoid;
+  }
+  img {
+max-width: 100% !important;
+  }
+  p,
+  h2,
+  h3 {
+orphans: 3;
+widows: 3;
+  }
+  h2,
+  h3 {
+page-break-after: avoid;
+  }
+  .navbar {
+display: none;
+  }
+  .btn > .caret,
+  .dropup > .btn > .caret {
+border-top-color: #000 !important;
+  }
+  .label {
+border: 1px solid #000;
+  }
+  .table {
+border-collapse: collapse !important;
+  }
+  .table td,
+  .table th {
+background-color: #fff !important;
+  }
+  .table-bordered th,
+  .table-bordered td {
+border: 1px solid #ddd !important;
+  }
+}
+@font-face {
+  font-family: 'Glyphicons Halflings';
+
+  src: 

[28/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/js/bootstrap.min.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/js/bootstrap.min.js
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/js/bootstrap.min.js
deleted file mode 100644
index 0e668e8..000
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/js/bootstrap.min.js
+++ /dev/null
@@ -1,9 +0,0 @@
-/*!
- * Bootstrap v3.0.2 by @fat and @mdo
- * Copyright 2013 Twitter, Inc.
- * Licensed under http://www.apache.org/licenses/LICENSE-2.0
- *
- * Designed and built with all the love in the world by @mdo and @fat.
- */
-
-[minified Bootstrap 3.0.2 JavaScript removed; the remainder of this hunk was mangled and truncated in the mail archive]

[40/50] [abbrv] hadoop git commit: HDDS-163. Add Datanode heartbeat dispatcher in SCM. Contributed by Nandakumar.

2018-06-14 Thread xkrogen
HDDS-163. Add Datanode heartbeat dispatcher in SCM.
Contributed by Nandakumar.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ddd09d59
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ddd09d59
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ddd09d59

Branch: refs/heads/HDFS-12943
Commit: ddd09d59f3d9825f068026622720914e04c2e1d6
Parents: 7547740
Author: Anu Engineer 
Authored: Wed Jun 13 20:18:22 2018 -0700
Committer: Anu Engineer 
Committed: Wed Jun 13 20:18:22 2018 -0700

--
 .../container/common/report/ReportManager.java  |   3 +-
 .../scm/server/SCMDatanodeProtocolServer.java   | 119 +---
 .../SCMDatanodeContainerReportHandler.java  |  76 
 .../report/SCMDatanodeHeartbeatDispatcher.java  | 189 +++
 .../report/SCMDatanodeNodeReportHandler.java|  43 +
 .../server/report/SCMDatanodeReportHandler.java |  83 
 .../report/SCMDatanodeReportHandlerFactory.java |  82 
 .../hdds/scm/server/report/package-info.java|  57 ++
 .../TestSCMDatanodeContainerReportHandler.java  |  34 
 .../TestSCMDatanodeHeartbeatDispatcher.java | 138 ++
 .../TestSCMDatanodeNodeReportHandler.java   |  36 
 .../TestSCMDatanodeReportHandlerFactory.java|  51 +
 .../hdds/scm/server/report/package-info.java|  21 +++
 .../apache/hadoop/ozone/scm/TestSCMMetrics.java |  20 +-
 14 files changed, 875 insertions(+), 77 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ddd09d59/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/report/ReportManager.java
--
diff --git 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/report/ReportManager.java
 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/report/ReportManager.java
index c09282e..8097cd6 100644
--- 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/report/ReportManager.java
+++ 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/report/ReportManager.java
@@ -50,7 +50,8 @@ public final class ReportManager {
 List publishers) {
 this.context = context;
 this.publishers = publishers;
-this.executorService = HadoopExecutors.newScheduledThreadPool(1,
+this.executorService = HadoopExecutors.newScheduledThreadPool(
+publishers.size(),
 new ThreadFactoryBuilder().setDaemon(true)
 .setNameFormat("Datanode ReportManager Thread - %d").build());
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ddd09d59/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
--
diff --git 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
 
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
index 1b1645d..7d16161 100644
--- 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
+++ 
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
@@ -69,7 +69,7 @@ import static org.apache.hadoop.hdds.protocol.proto
 
 
 import org.apache.hadoop.hdds.scm.HddsServerUtil;
-import org.apache.hadoop.hdds.scm.container.placement.metrics.ContainerStat;
+import org.apache.hadoop.hdds.scm.server.report.SCMDatanodeHeartbeatDispatcher;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
@@ -114,6 +114,7 @@ public class SCMDatanodeProtocolServer implements
 
   private final StorageContainerManager scm;
   private final InetSocketAddress datanodeRpcAddress;
+  private final SCMDatanodeHeartbeatDispatcher heartbeatDispatcher;
 
   public SCMDatanodeProtocolServer(final OzoneConfiguration conf,
   StorageContainerManager scm)  throws IOException {
@@ -148,14 +149,22 @@ public class SCMDatanodeProtocolServer implements
 updateRPCListenAddress(
 conf, OZONE_SCM_DATANODE_ADDRESS_KEY, datanodeRpcAddr,
 datanodeRpcServer);
+
+heartbeatDispatcher = SCMDatanodeHeartbeatDispatcher.newBuilder(conf, scm)
+.addHandlerFor(NodeReportProto.class)
+.addHandlerFor(ContainerReportsProto.class)
+.build();
   }
 
-  public InetSocketAddress getDatanodeRpcAddress() {
-return datanodeRpcAddress;
+  public void start() {
+LOG.info(
+StorageContainerManager.buildRpcServerStartMessage(
+"RPC server for DataNodes", 

[37/50] [abbrv] hadoop git commit: YARN-8411. Restart stopped system service during RM start. Contributed by Billie Rinaldi

2018-06-14 Thread xkrogen
YARN-8411. Restart stopped system service during RM start. Contributed by Billie Rinaldi


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/69b05968
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/69b05968
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/69b05968

Branch: refs/heads/HDFS-12943
Commit: 69b05968974994c6e22d6562a67b9392d1700094
Parents: 7566e0e
Author: Eric Yang 
Authored: Wed Jun 13 19:05:52 2018 -0400
Committer: Eric Yang 
Committed: Wed Jun 13 19:05:52 2018 -0400

--
 .../client/SystemServiceManagerImpl.java| 29 --
 .../hadoop/yarn/service/webapp/ApiServer.java   |  3 +-
 .../hadoop/yarn/service/ServiceClientTest.java  |  4 +-
 .../client/TestSystemServiceManagerImpl.java| 40 +---
 .../yarn/service/client/ServiceClient.java  | 14 +--
 5 files changed, 72 insertions(+), 18 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/69b05968/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/SystemServiceManagerImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/SystemServiceManagerImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/SystemServiceManagerImpl.java
index f9cfa92..08ad1b6 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/SystemServiceManagerImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/SystemServiceManagerImpl.java
@@ -29,7 +29,9 @@ import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.server.service.SystemServiceManager;
 import org.apache.hadoop.yarn.service.api.records.Service;
 import org.apache.hadoop.yarn.service.api.records.ServiceState;
+import org.apache.hadoop.yarn.service.conf.SliderExitCodes;
 import org.apache.hadoop.yarn.service.conf.YarnServiceConf;
+import org.apache.hadoop.yarn.service.exceptions.SliderException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -228,12 +230,31 @@ public class SystemServiceManagerImpl extends 
AbstractService
   userUgi.doAs(new PrivilegedExceptionAction() {
 @Override public ApplicationId run()
 throws IOException, YarnException {
-  ApplicationId applicationId = 
serviceClient.actionCreate(service);
-  return applicationId;
+  boolean tryStart = true;
+  try {
+serviceClient.actionBuild(service);
+  } catch (Exception e) {
+if (e instanceof SliderException && ((SliderException) e)
+.getExitCode() == SliderExitCodes.EXIT_INSTANCE_EXISTS) {
+  LOG.info("Service {} already exists, will attempt to start " 
+
+  "service", service.getName());
+} else {
+  tryStart = false;
+  LOG.info("Got exception saving {}, will not attempt to " +
+  "start service", service.getName(), e);
+}
+  }
+  if (tryStart) {
+return serviceClient.actionStartAndGetId(service.getName());
+  } else {
+return null;
+  }
 }
   });
-  LOG.info("Service {} submitted with Application ID: {}",
-  service.getName(), applicationId);
+  if (applicationId != null) {
+LOG.info("Service {} submitted with Application ID: {}",
+service.getName(), applicationId);
+  }
 }
   }
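
The change boils down to a create-if-absent-then-start pattern. A simplified standalone sketch (the exception type and client interface below stand in for SliderException/EXIT_INSTANCE_EXISTS and ServiceClient and are not the real API):

    public class ServiceStarter {
      static class AlreadyExistsException extends Exception { }

      interface Client {
        void build(String serviceName) throws Exception;
        String start(String serviceName) throws Exception;
      }

      // Create-if-absent, then start. The patch additionally logs and skips the
      // start when build fails for any other reason; here such failures propagate.
      static String ensureStarted(Client client, String serviceName) throws Exception {
        try {
          client.build(serviceName);
        } catch (AlreadyExistsException e) {
          // The service definition was already saved by an earlier RM start; proceed.
        }
        return client.start(serviceName);
      }
    }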
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/69b05968/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/webapp/ApiServer.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/webapp/ApiServer.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/webapp/ApiServer.java
index 578273c..82fadae 100644
--- 

[31/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/css/bootstrap.min.css
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/css/bootstrap.min.css
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/css/bootstrap.min.css
deleted file mode 100644
index 3deec34..000
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/css/bootstrap.min.css
+++ /dev/null
@@ -1,9 +0,0 @@
-/*!
- * Bootstrap v3.0.2 by @fat and @mdo
- * Copyright 2013 Twitter, Inc.
- * Licensed under http://www.apache.org/licenses/LICENSE-2.0
- *
- * Designed and built with all the love in the world by @mdo and @fat.
- */
-
-/*! normalize.css v2.1.3 | MIT License | git.io/normalize */
-[minified Bootstrap 3.0.2 CSS removed; the remainder of this hunk was mangled and truncated in the mail archive]
 

[45/50] [abbrv] hadoop git commit: YARN-8426:Upgrade jquery-ui to 1.12.1 in YARN. Contributed by Sunil Govindan

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/361ffb26/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.12.1.custom.min.js
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.12.1.custom.min.js
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.12.1.custom.min.js
new file mode 100644
index 000..25398a1
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.12.1.custom.min.js
@@ -0,0 +1,13 @@
+/*! jQuery UI - v1.12.1 - 2016-09-14
+* http://jqueryui.com
+* Includes: widget.js, position.js, data.js, disable-selection.js, effect.js, 
effects/effect-blind.js, effects/effect-bounce.js, effects/effect-clip.js, 
effects/effect-drop.js, effects/effect-explode.js, effects/effect-fade.js, 
effects/effect-fold.js, effects/effect-highlight.js, effects/effect-puff.js, 
effects/effect-pulsate.js, effects/effect-scale.js, effects/effect-shake.js, 
effects/effect-size.js, effects/effect-slide.js, effects/effect-transfer.js, 
focusable.js, form-reset-mixin.js, jquery-1-7.js, keycode.js, labels.js, 
scroll-parent.js, tabbable.js, unique-id.js, widgets/accordion.js, 
widgets/autocomplete.js, widgets/button.js, widgets/checkboxradio.js, 
widgets/controlgroup.js, widgets/datepicker.js, widgets/dialog.js, 
widgets/draggable.js, widgets/droppable.js, widgets/menu.js, widgets/mouse.js, 
widgets/progressbar.js, widgets/resizable.js, widgets/selectable.js, 
widgets/selectmenu.js, widgets/slider.js, widgets/sortable.js, 
widgets/spinner.js, widgets/tabs.js, widgets/tooltip.js
+* Copyright jQuery Foundation and other contributors; Licensed MIT */
+
[minified jQuery UI 1.12.1 source omitted; the mailing-list archive mangled and truncated this hunk]
 

[44/50] [abbrv] hadoop git commit: YARN-8426:Upgrade jquery-ui to 1.12.1 in YARN. Contributed by Sunil Govindan

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/361ffb26/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.9.1.custom.min.js
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.9.1.custom.min.js
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.9.1.custom.min.js
deleted file mode 100644
index aa7a923..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.9.1.custom.min.js
+++ /dev/null
@@ -1,6 +0,0 @@
-/*! jQuery UI - v1.9.1 - 2012-10-25
-* http://jqueryui.com
-* Includes: jquery.ui.core.js, jquery.ui.widget.js, jquery.ui.mouse.js, 
jquery.ui.position.js, jquery.ui.accordion.js, jquery.ui.autocomplete.js, 
jquery.ui.button.js, jquery.ui.datepicker.js, jquery.ui.dialog.js, 
jquery.ui.draggable.js, jquery.ui.droppable.js, jquery.ui.effect.js, 
jquery.ui.effect-blind.js, jquery.ui.effect-bounce.js, 
jquery.ui.effect-clip.js, jquery.ui.effect-drop.js, 
jquery.ui.effect-explode.js, jquery.ui.effect-fade.js, 
jquery.ui.effect-fold.js, jquery.ui.effect-highlight.js, 
jquery.ui.effect-pulsate.js, jquery.ui.effect-scale.js, 
jquery.ui.effect-shake.js, jquery.ui.effect-slide.js, 
jquery.ui.effect-transfer.js, jquery.ui.menu.js, jquery.ui.progressbar.js, 
jquery.ui.resizable.js, jquery.ui.selectable.js, jquery.ui.slider.js, 
jquery.ui.sortable.js, jquery.ui.spinner.js, jquery.ui.tabs.js, 
jquery.ui.tooltip.js
-* Copyright (c) 2012 jQuery Foundation and other contributors Licensed MIT */
-
[minified jQuery UI 1.9.1 source omitted; the mailing-list archive mangled and truncated this hunk]

[42/50] [abbrv] hadoop git commit: HDFS-13563. TestDFSAdminWithHA times out on Windows. Contributed by Lukas Majercak.

2018-06-14 Thread xkrogen
HDFS-13563. TestDFSAdminWithHA times out on Windows. Contributed by Lukas 
Majercak.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8d4926f3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8d4926f3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8d4926f3

Branch: refs/heads/HDFS-12943
Commit: 8d4926f38bf53b32453cd2bc7322c8818f752f85
Parents: 9119b3c
Author: Inigo Goiri 
Authored: Thu Jun 14 09:58:50 2018 -0700
Committer: Inigo Goiri 
Committed: Thu Jun 14 09:58:50 2018 -0700

--
 .../java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java | 7 +++
 1 file changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8d4926f3/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
index aa4d481..c6139c1 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
@@ -97,6 +97,13 @@ public class TestDFSAdminWithHA {
 
 System.setOut(new PrintStream(out));
 System.setErr(new PrintStream(err));
+
+// Reduce the number of retries to speed up the tests.
+conf.setInt(
+CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 3);
+conf.setInt(
+CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY,
+500);
   }
 
   @After

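The two keys above are the generic hadoop-common IPC connection-retry settings, so the same speed-up works for any test client that may have to talk to a stopped NameNode. A minimal, self-contained sketch assuming only the public Configuration API (the class name FastFailingConf is illustrative, not part of the patch):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

public class FastFailingConf {
  // Build a Configuration whose IPC clients give up quickly instead of
  // retrying for minutes when the remote NameNode is unreachable.
  public static Configuration create() {
    Configuration conf = new Configuration();
    conf.setInt(
        CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 3);
    conf.setInt(
        CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY, 500);
    return conf;
  }
}
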




[17/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.min.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.min.js
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.min.js
new file mode 100644
index 000..9bcd2fc
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.min.js
@@ -0,0 +1,7 @@
+/*!
+ * Bootstrap v3.3.7 (http://getbootstrap.com)
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under the MIT license
+ */
+if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires 
jQuery");+function(a){"use strict";var b=a.fn.jquery.split(" 
")[0].split(".");if(b[0]<2&[1]<9||1==b[0]&&9==b[1]&[2]<1||b[0]>3)throw new 
Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but 
lower than version 4")}(jQuery),+function(a){"use strict";function b(){var 
a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd
 otransitionend",transition:"transitionend"};for(var c in b)if(void 
0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var
 c=!1,d=this;a(this).one("bsTransitionEnd",function(){c=!0});var 
e=function(){c||a(d).trigger(a.support.transition.end)};return 
setTimeout(e,b),this},a(function(){a.support.transition=b(),a.support.transition&&(a.event.special.bsTransitionEnd={bindType:a.support.transition.end,delegateType:a.support.transition.end,handle:function(b){if(a(b
 .target).is(this))return 
b.handleObj.handler.apply(this,arguments)}})})}(jQuery),+function(a){"use 
strict";function b(b){return this.each(function(){var 
c=a(this),e=c.data("bs.alert");e||c.data("bs.alert",e=new 
d(this)),"string"==typeof b&[b].call(c)})}var 
c='[data-dismiss="alert"]',d=function(b){a(b).on("click",c,this.close)};d.VERSION="3.3.7",d.TRANSITION_DURATION=150,d.prototype.close=function(b){function
 c(){g.detach().trigger("closed.bs.alert").remove()}var 
e=a(this),f=e.attr("data-target");f||(f=e.attr("href"),f=f&(/.*(?=#[^\s]*$)/,""));var
 
g=a("#"===f?[]:f);b&(),g.length||(g=e.closest(".alert")),g.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(g.removeClass("in"),a.support.transition&("fade")?g.one("bsTransitionEnd",c).emulateTransitionEnd(d.TRANSITION_DURATION):c())};var
 
e=a.fn.alert;a.fn.alert=b,a.fn.alert.Constructor=d,a.fn.alert.noConflict=function(){return
 a.fn.alert=e,this},a(document).on("click.bs.alert.data-api",c
 ,d.prototype.close)}(jQuery),+function(a){"use strict";function b(b){return 
this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof 
b&e||d.data("bs.button",e=new 
c(this,f)),"toggle"==b?e.toggle():b&(b)})}var 
c=function(b,d){this.$element=a(b),this.options=a.extend({},c.DEFAULTS,d),this.isLoading=!1};c.VERSION="3.3.7",c.DEFAULTS={loadingText:"loading..."},c.prototype.setState=function(b){var
 
c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",null==f.resetText&("resetText",d[e]()),setTimeout(a.proxy(function(){d[e](null==f[b]?this.options[b]:f[b]),"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c).prop(c,!0)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c).prop(c,!1))},this),0)},c.prototype.toggle=function(){var
 a=!0,b=this.$element.closest('[data-toggle="buttons"]');if(b.length){var 
c=this.$element.find("input");"radio"==c.prop("type")?(c.prop("checked")&&(a=!1),b.find(".active").removeCla
 
ss("active"),this.$element.addClass("active")):"checkbox"==c.prop("type")&&(c.prop("checked")!==this.$element.hasClass("active")&&(a=!1),this.$element.toggleClass("active")),c.prop("checked",this.$element.hasClass("active")),a&("change")}else
 
this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var
 
d=a.fn.button;a.fn.button=b,a.fn.button.Constructor=c,a.fn.button.noConflict=function(){return
 
a.fn.button=d,this},a(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(c){var
 
d=a(c.target).closest(".btn");b.call(d,"toggle"),a(c.target).is('input[type="radio"],
 
input[type="checkbox"]')||(c.preventDefault(),d.is("input,button")?d.trigger("focus"):d.find("input:visible,button:visible").first().trigger("focus"))}).on("focus.bs.button.data-api
 
blur.bs.button.data-api",'[data-toggle^="button"]',function(b){a(b.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(b.type))})}(jQuery),+function(a){"us
 e strict";function b(b){return this.each(function(){var 
d=a(this),e=d.data("bs.carousel"),f=a.extend({},c.DEFAULTS,d.data(),"object"==typeof
 b&),g="string"==typeof b?b:f.slide;e||d.data("bs.carousel",e=new 
c(this,f)),"number"==typeof 
b?e.to(b):g?e[g]():f.interval&().cycle()})}var 

[09/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-3.3.1.min.js
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-3.3.1.min.js
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-3.3.1.min.js
new file mode 100644
index 000..4d9b3a2
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-3.3.1.min.js
@@ -0,0 +1,2 @@
+/*! jQuery v3.3.1 | (c) JS Foundation and other contributors | 
jquery.org/license */
[minified jQuery 3.3.1 source omitted; the mailing-list archive mangled and truncated this hunk]

[50/50] [abbrv] hadoop git commit: HDFS-13608. [SBN read] Edit Tail Fast Path Part 2: Add ability for JournalNode to serve edits via RPC. Contributed by Erik Krogen.

2018-06-14 Thread xkrogen
HDFS-13608. [SBN read] Edit Tail Fast Path Part 2: Add ability for JournalNode 
to serve edits via RPC. Contributed by Erik Krogen.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/292ccdce
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/292ccdce
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/292ccdce

Branch: refs/heads/HDFS-12943
Commit: 292ccdce88a813b27afde14e7d2fd51b193dd032
Parents: d044cae
Author: Erik Krogen 
Authored: Wed May 23 12:42:13 2018 -0700
Committer: Erik Krogen 
Committed: Thu Jun 14 17:37:23 2018 -0700

--
 .../hadoop-common/src/site/markdown/Metrics.md  |  5 ++
 .../qjournal/protocol/QJournalProtocol.java | 24 +++-
 .../QJournalProtocolServerSideTranslatorPB.java | 14 +
 .../QJournalProtocolTranslatorPB.java   | 20 +++
 .../hadoop/hdfs/qjournal/server/Journal.java| 59 
 .../hdfs/qjournal/server/JournalMetrics.java| 20 ++-
 .../qjournal/server/JournalNodeRpcServer.java   |  8 +++
 .../src/main/proto/QJournalProtocol.proto   | 18 ++
 .../hdfs/qjournal/server/TestJournal.java   | 47 
 9 files changed, 213 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/292ccdce/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
--
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md 
b/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
index 676ab0b..3efec32 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
@@ -307,6 +307,11 @@ The server-side metrics for a journal from the 
JournalNode's perspective. Each m
 | `LastWrittenTxId` | The highest transaction id stored on this JournalNode |
 | `LastPromisedEpoch` | The last epoch number which this node has promised not 
to accept any lower epoch, or 0 if no promises have been made |
 | `LastJournalTimestamp` | The timestamp of last successfully written 
transaction |
+| `TxnsServedViaRpc` | Number of transactions served via the RPC mechanism |
+| `BytesServedViaRpc` | Number of bytes served via the RPC mechanism |
+| `RpcRequestCacheMissAmountNumMisses` | Number of RPC requests which could 
not be served due to lack of data in the cache |
+| `RpcRequestCacheMissAmountAvgTxns` | The average number of transactions by 
which a request missed the cache; for example if transaction ID 10 is requested 
and the cache's oldest transaction is ID 15, value 5 will be added to this 
average |
+| `RpcEmptyResponses` | Number of RPC requests with zero edits returned |
 
 datanode
 
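A sketch of how the new counters can be read back in a unit test with org.apache.hadoop.test.MetricsAsserts. Only the metric names come from the table above; the source name "Journal-" + journalId, the assumption that these are plain counters, and the class and method names are illustrative:

import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
import static org.apache.hadoop.test.MetricsAsserts.getMetrics;

import org.apache.hadoop.metrics2.MetricsRecordBuilder;

public class JournalRpcMetricsCheck {
  // Verify that edits were served from the JournalNode's in-memory cache via
  // RPC and that no request came back with zero edits.
  static void assertServedViaRpc(String journalId, long expectedTxns) {
    MetricsRecordBuilder rb = getMetrics("Journal-" + journalId);
    assertCounter("TxnsServedViaRpc", expectedTxns, rb);
    assertCounter("RpcEmptyResponses", 0L, rb);
  }
}
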

http://git-wip-us.apache.org/repos/asf/hadoop/blob/292ccdce/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocol/QJournalProtocol.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocol/QJournalProtocol.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocol/QJournalProtocol.java
index 5558bd5..c002796 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocol/QJournalProtocol.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocol/QJournalProtocol.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.qjournal.client.QuorumJournalManager;
 import 
org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetEditLogManifestResponseProto;
+import 
org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetJournaledEditsResponseProto;
 import 
org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetJournalStateResponseProto;
 import 
org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.NewEpochResponseProto;
 import 
org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto;
@@ -139,7 +140,28 @@ public interface QJournalProtocol {
  long sinceTxId,
  boolean inProgressOk)
   throws IOException;
-  
+
+  /**
+   * Fetch edit logs present in the Journal's in-memory cache of edits
+   * ({@link org.apache.hadoop.hdfs.qjournal.server.JournaledEditsCache}).
+   * To enable this cache, in-progress edit log tailing must be enabled via the
+   * {@value DFSConfigKeys#DFS_HA_TAILEDITS_INPROGRESS_KEY} configuration key.
+   *
+   * @param jid The ID of the journal from which to fetch 

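As the javadoc notes, the in-memory edits cache only exists when in-progress edit log tailing is turned on. A minimal configuration sketch, assuming only that DFSConfigKeys.DFS_HA_TAILEDITS_INPROGRESS_KEY resolves to the documented property (the class name is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;

public class InProgressTailingConf {
  // Enable in-progress edit log tailing so the JournalNode maintains the
  // in-memory cache that the new fetch-edits RPC serves from.
  public static Configuration create() {
    Configuration conf = new Configuration();
    conf.setBoolean(DFSConfigKeys.DFS_HA_TAILEDITS_INPROGRESS_KEY, true);
    return conf;
  }
}
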
[08/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jt/jquery.jstree.js
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jt/jquery.jstree.js
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jt/jquery.jstree.js
index d4d8985..b212c9e 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jt/jquery.jstree.js
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jt/jquery.jstree.js
@@ -435,7 +435,7 @@
   .undelegate(".jstree")
   .removeData("jstree-instance-id")
   .find("[class^='jstree']")
-.andSelf()
+.addBack()
 .attr("class", function () { return 
this.className.replace(/jstree[^ ]*|$/ig,''); });
 $(document)
   .unbind(".jstree-" + n)
@@ -678,7 +678,7 @@
 }
 else {
   original_obj = obj;
-  if(obj.is(".jstree-closed")) { obj = 
obj.find("li.jstree-closed").andSelf(); }
+  if(obj.is(".jstree-closed")) { obj = 
obj.find("li.jstree-closed").addBack(); }
   else { obj = obj.find("li.jstree-closed"); }
 }
 var _this = this;
@@ -694,12 +694,12 @@
 var _this = this;
 obj = obj ? this._get_node(obj) : this.get_container();
 if(!obj || obj === -1) { obj = this.get_container_ul(); }
-obj.find("li.jstree-open").andSelf().each(function () { 
_this.close_node(this, !do_animation); });
+obj.find("li.jstree-open").addBack().each(function () { 
_this.close_node(this, !do_animation); });
 this.__callback({ "obj" : obj });
   },
   clean_node  : function (obj) {
 obj = obj && obj != -1 ? $(obj) : this.get_container_ul();
-obj = obj.is("li") ? obj.find("li").andSelf() : obj.find("li");
+obj = obj.is("li") ? obj.find("li").addBack() : obj.find("li");
 obj.removeClass("jstree-last")
   .filter("li:last-child").addClass("jstree-last").end()
   .filter(":has(li)")
@@ -922,7 +922,7 @@
 if(!obj || !obj.o || obj.or[0] === obj.o[0]) { return false; }
 if(obj.op && obj.np && obj.op[0] === obj.np[0] && obj.cp - 1 === 
obj.o.index()) { return false; }
 obj.o.each(function () {
-  if(r.parentsUntil(".jstree", "li").andSelf().index(this) !== -1) { 
ret = false; return false; }
+  if(r.parentsUntil(".jstree", "li").addBack().index(this) !== -1) { 
ret = false; return false; }
 });
 return ret;
   },
@@ -941,7 +941,7 @@
 var o = false;
 if(is_copy) {
   o = obj.o.clone(true);
-  o.find("*[id]").andSelf().each(function () {
+  o.find("*[id]").addBack().each(function () {
 if(this.id) { this.id = "copy_" + this.id; }
   });
 }
@@ -1138,7 +1138,7 @@
   switch(!0) {
 case (is_range):
   this.data.ui.last_selected.addClass("jstree-last-selected");
-  obj = obj[ obj.index() < this.data.ui.last_selected.index() ? 
"nextUntil" : "prevUntil" ](".jstree-last-selected").andSelf();
+  obj = obj[ obj.index() < this.data.ui.last_selected.index() ? 
"nextUntil" : "prevUntil" ](".jstree-last-selected").addBack();
   if(s.select_limit == -1 || obj.length < s.select_limit) {
 this.data.ui.last_selected.removeClass("jstree-last-selected");
 this.data.ui.selected.each(function () {
@@ -1242,7 +1242,7 @@
 .bind("move_node.jstree", $.proxy(function (e, data) {
   if(this._get_settings().crrm.move.open_onmove) {
 var t = this;
-
data.rslt.np.parentsUntil(".jstree").andSelf().filter(".jstree-closed").each(function
 () {
+
data.rslt.np.parentsUntil(".jstree").addBack().filter(".jstree-closed").each(function
 () {
   t.open_node(this, false, true);
 });
   }
@@ -2799,7 +2799,7 @@
 obj.each(function () {
   t = $(this);
   c = t.is("li") && (t.hasClass("jstree-checked") || (rc && 
t.children(":checked").length)) ? "jstree-checked" : "jstree-unchecked";
-  t.find("li").andSelf().each(function () {
+  t.find("li").addBack().each(function () {
 var $t = $(this), nm;
 $t.children("a" + (_this.data.languages ? "" : ":eq(0)") 
).not(":has(.jstree-checkbox)").prepend("").parent().not(".jstree-checked, 
.jstree-unchecked").addClass( ts ? "jstree-unchecked" : c );
 if(rc) {
@@ -2843,13 +2843,13 @@
 }
 else {
   if(state) {
-coll = obj.find("li").andSelf();
+coll = obj.find("li").addBack();
 if(!coll.filter(".jstree-checked, .jstree-undetermined").length) { 

[21/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.eot
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.eot
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.eot
new file mode 100644
index 000..b93a495
Binary files /dev/null and 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.eot
 differ





[11/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
index 3338052..af1440a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
@@ -243,7 +243,7 @@
 
src/main/resources/webapps/static/dt-1.9.4/js/jquery.dataTables.min.js
 
src/main/resources/webapps/static/jt/jquery.jstree.js
 
src/main/resources/webapps/static/jquery/jquery-ui-1.9.1.custom.min.js
-
src/main/resources/webapps/static/jquery/jquery-1.8.2.min.js
+
src/main/resources/webapps/static/jquery/jquery-3.3.1.min.js
 
src/main/resources/webapps/static/jquery/themes-1.9.1/base/jquery-ui.css
 
src/test/resources/application_1440536969523_0001.har/_index
 
src/test/resources/application_1440536969523_0001.har/part-0

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
index dba19c9..d4fba1f 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
@@ -67,7 +67,7 @@ public class JQueryUI extends HtmlBlock {
   protected void render(Block html) {
 html.link(root_url("static/jquery/themes-1.9.1/base/jquery-ui.css"))
 .link(root_url("static/dt-1.9.4/css/jui-dt.css"))
-.script(root_url("static/jquery/jquery-1.8.2.min.js"))
+.script(root_url("static/jquery/jquery-3.3.1.min.js"))
 .script(root_url("static/jquery/jquery-ui-1.9.1.custom.min.js"))
 .script(root_url("static/dt-1.9.4/js/jquery.dataTables.min.js"))
 .script(root_url("static/yarn.dt.plugins.js"))





[18/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.js
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.js
new file mode 100644
index 000..8a2e99a
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.js
@@ -0,0 +1,2377 @@
+/*!
+ * Bootstrap v3.3.7 (http://getbootstrap.com)
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under the MIT license
+ */
+
+if (typeof jQuery === 'undefined') {
+  throw new Error('Bootstrap\'s JavaScript requires jQuery')
+}
+
++function ($) {
+  'use strict';
+  var version = $.fn.jquery.split(' ')[0].split('.')
+  if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 
9 && version[2] < 1) || (version[0] > 3)) {
+throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or 
higher, but lower than version 4')
+  }
+}(jQuery);
+
+/* 
+ * Bootstrap: transition.js v3.3.7
+ * http://getbootstrap.com/javascript/#transitions
+ * 
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ *  */
+
+
++function ($) {
+  'use strict';
+
+  // CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/)
+  // 
+
+  function transitionEnd() {
+var el = document.createElement('bootstrap')
+
+var transEndEventNames = {
+  WebkitTransition : 'webkitTransitionEnd',
+  MozTransition: 'transitionend',
+  OTransition  : 'oTransitionEnd otransitionend',
+  transition   : 'transitionend'
+}
+
+for (var name in transEndEventNames) {
+  if (el.style[name] !== undefined) {
+return { end: transEndEventNames[name] }
+  }
+}
+
+return false // explicit for ie8 (  ._.)
+  }
+
+  // http://blog.alexmaccaw.com/css-transitions
+  $.fn.emulateTransitionEnd = function (duration) {
+var called = false
+var $el = this
+$(this).one('bsTransitionEnd', function () { called = true })
+var callback = function () { if (!called) 
$($el).trigger($.support.transition.end) }
+setTimeout(callback, duration)
+return this
+  }
+
+  $(function () {
+$.support.transition = transitionEnd()
+
+if (!$.support.transition) return
+
+$.event.special.bsTransitionEnd = {
+  bindType: $.support.transition.end,
+  delegateType: $.support.transition.end,
+  handle: function (e) {
+if ($(e.target).is(this)) return e.handleObj.handler.apply(this, 
arguments)
+  }
+}
+  })
+
+}(jQuery);
+
+/* 
+ * Bootstrap: alert.js v3.3.7
+ * http://getbootstrap.com/javascript/#alerts
+ * 
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ *  */
+
+
++function ($) {
+  'use strict';
+
+  // ALERT CLASS DEFINITION
+  // ==
+
+  var dismiss = '[data-dismiss="alert"]'
+  var Alert   = function (el) {
+$(el).on('click', dismiss, this.close)
+  }
+
+  Alert.VERSION = '3.3.7'
+
+  Alert.TRANSITION_DURATION = 150
+
+  Alert.prototype.close = function (e) {
+var $this= $(this)
+var selector = $this.attr('data-target')
+
+if (!selector) {
+  selector = $this.attr('href')
+  selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip 
for ie7
+}
+
+var $parent = $(selector === '#' ? [] : selector)
+
+if (e) e.preventDefault()
+
+if (!$parent.length) {
+  $parent = $this.closest('.alert')
+}
+
+$parent.trigger(e = $.Event('close.bs.alert'))
+
+if (e.isDefaultPrevented()) return
+
+$parent.removeClass('in')
+
+function removeElement() {
+  // detach from parent, fire event then clean up data
+  $parent.detach().trigger('closed.bs.alert').remove()
+}
+
+$.support.transition && $parent.hasClass('fade') ?
+  $parent
+.one('bsTransitionEnd', removeElement)
+.emulateTransitionEnd(Alert.TRANSITION_DURATION) :
+  removeElement()
+  }
+
+
+  // ALERT PLUGIN DEFINITION
+  // ===
+
+  function Plugin(option) {
+return this.each(function () {
+  var $this = $(this)
+  var data  = $this.data('bs.alert')
+
+  if (!data) $this.data('bs.alert', (data = new Alert(this)))
+  

[34/50] [abbrv] hadoop git commit: HDFS-13641. Add metrics for edit log tailing. Contributed by Chao Sun.

2018-06-14 Thread xkrogen
HDFS-13641. Add metrics for edit log tailing. Contributed by Chao Sun.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8e7548d3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8e7548d3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8e7548d3

Branch: refs/heads/HDFS-12943
Commit: 8e7548d33be9c4874daab18b2e774bdc2ed216d3
Parents: 6307962
Author: Yiqun Lin 
Authored: Wed Jun 13 20:05:55 2018 +0800
Committer: Yiqun Lin 
Committed: Wed Jun 13 20:05:55 2018 +0800

--
 .../hadoop-common/src/site/markdown/Metrics.md  | 12 
 .../org/apache/hadoop/test/MetricsAsserts.java  | 22 ++--
 .../hdfs/server/namenode/ha/EditLogTailer.java  | 11 
 .../namenode/metrics/NameNodeMetrics.java   | 59 +++-
 .../namenode/metrics/TestNameNodeMetrics.java   | 41 ++
 5 files changed, 140 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8e7548d3/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
--
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md 
b/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
index 2538491..676ab0b 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
@@ -181,6 +181,18 @@ Each metrics record contains tags such as ProcessName, 
SessionId, and Hostname a
 | `WarmUpEDEKTimeAvgTime` | Average time of warming up EDEK in milliseconds |
 | `ResourceCheckTime`*num*`s(50/75/90/95/99)thPercentileLatency` | The 
50/75/90/95/99th percentile of NameNode resource check latency in milliseconds. 
Percentile measurement is off by default, by watching no intervals. The 
intervals are specified by `dfs.metrics.percentiles.intervals`. |
 | `StorageBlockReport`*num*`s(50/75/90/95/99)thPercentileLatency` | The 
50/75/90/95/99th percentile of storage block report latency in milliseconds. 
Percentile measurement is off by default, by watching no intervals. The 
intervals are specified by `dfs.metrics.percentiles.intervals`. |
+| `EditLogTailTimeNumOps` | Total number of times the standby NameNode tailed 
the edit log |
+| `EditLogTailTimeAvgTime` | Average time (in milliseconds) spent by standby 
NameNode in tailing edit log |
+| `EditLogTailTime`*num*`s(50/75/90/95/99)thPercentileLatency` | The 
50/75/90/95/99th percentile of time spent in tailing edit logs by standby 
NameNode, in milliseconds. Percentile measurement is off by default, by 
watching no intervals. The intervals are specified by 
`dfs.metrics.percentiles.intervals`. |
+| `EditLogFetchTimeNumOps` | Total number of times the standby NameNode 
fetched remote edit streams from journal nodes |
+| `EditLogFetchTimeAvgTime` | Average time (in milliseconds) spent by standby 
NameNode in fetching remote edit streams from journal nodes |
+| `EditLogFetchTime`*num*`s(50/75/90/95/99)thPercentileLatency` | The 
50/75/90/95/99th percentile of time spent in fetching edit streams from journal 
nodes by standby NameNode, in milliseconds. Percentile measurement is off by 
default, by watching no intervals. The intervals are specified by 
`dfs.metrics.percentiles.intervals`. |
+| `NumEditLogLoadedNumOps` | Total number of times edits were loaded by 
standby NameNode |
+| `NumEditLogLoadedAvgCount` | Average number of edits loaded by standby 
NameNode in each edit log tailing |
+| `NumEditLogLoaded`*num*`s(50/75/90/95/99)thPercentileCount` | The 
50/75/90/95/99th percentile of number of edits loaded by standby NameNode in 
each edit log tailing. Percentile measurement is off by default, by watching no 
intervals. The intervals are specified by `dfs.metrics.percentiles.intervals`. |
+| `EditLogTailIntervalNumOps` | Total number of intervals between edit log 
tailings by standby NameNode |
+| `EditLogTailIntervalAvgTime` | Average time of intervals between edit log 
tailings by standby NameNode in milliseconds |
+| `EditLogTailInterval`*num*`s(50/75/90/95/99)thPercentileLatency` | The 
50/75/90/95/99th percentile of time between edit log tailings by standby 
NameNode, in milliseconds. Percentile measurement is off by default, by 
watching no intervals. The intervals are specified by 
`dfs.metrics.percentiles.intervals`. |
 
 FSNamesystem
 

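All of the percentile rows above are inert until `dfs.metrics.percentiles.intervals` names at least one rollover interval. A minimal sketch of switching them on programmatically; the 60-second and 10-minute intervals are arbitrary examples, not values from the patch:

import org.apache.hadoop.conf.Configuration;

public class EditTailPercentilesConf {
  // Ask the standby NameNode to maintain 60-second and 10-minute percentile
  // windows for EditLogTailTime, EditLogFetchTime, NumEditLogLoaded and
  // EditLogTailInterval.
  public static Configuration create() {
    Configuration conf = new Configuration();
    conf.set("dfs.metrics.percentiles.intervals", "60,600");
    return conf;
  }
}
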
http://git-wip-us.apache.org/repos/asf/hadoop/blob/8e7548d3/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java
 

[29/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/js/bootstrap-editable.min.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/js/bootstrap-editable.min.js
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/js/bootstrap-editable.min.js
deleted file mode 100644
index 539d6c1..000
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/js/bootstrap-editable.min.js
+++ /dev/null
@@ -1,7 +0,0 @@
-/*! X-editable - v1.5.0
-* In-place editing with Twitter Bootstrap, jQuery UI or pure jQuery
-* http://github.com/vitalets/x-editable
-* Copyright (c) 2013 Vitaliy Potapov; Licensed MIT */
[minified X-editable 1.5.0 source omitted; the mailing-list archive mangled and truncated this hunk]

[20/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.svg
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.svg
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.svg
new file mode 100644
index 000..f155876
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.svg
@@ -0,0 +1,288 @@
[288 lines of SVG glyph markup for glyphicons-halflings-regular.svg; the tags were stripped by the mailing-list archive]

[06/50] [abbrv] hadoop git commit: HADOOP-15527. Improve delay check for stopping processes. Contributed by Vinod Kumar Vavilapalli

2018-06-14 Thread xkrogen
HADOOP-15527.  Improve delay check for stopping processes.
   Contributed by Vinod Kumar Vavilapalli


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/108da853
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/108da853
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/108da853

Branch: refs/heads/HDFS-12943
Commit: 108da85320d65e37fe835de65866b818e5420587
Parents: 5670e89
Author: Eric Yang 
Authored: Tue Jun 12 20:40:32 2018 -0400
Committer: Eric Yang 
Committed: Tue Jun 12 20:40:32 2018 -0400

--
 .../src/main/bin/hadoop-functions.sh| 34 +++-
 .../src/test/scripts/hadoop_stop_daemon.bats| 24 +-
 2 files changed, 56 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/108da853/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index bee1430..cbedd972 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -2040,6 +2040,35 @@ function hadoop_start_secure_daemon_wrapper
   return 0
 }
 
+## @description  Wait till process dies or till timeout
+## @audience private
+## @stability evolving
+## @param pid
+## @param timeout
+function wait_process_to_die_or_timeout
+{
+  local pid=$1
+  local timeout=$2
+
+  # Normalize timeout
+  # Round up or down
+  timeout=$(printf "%.0f\n" "${timeout}")
+  if [[ ${timeout} -lt 1  ]]; then
+# minimum 1 second
+timeout=1
+  fi
+
+  # Wait to see if it's still alive
+  for (( i=0; i < "${timeout}"; i++ ))
+  do
+if kill -0 "${pid}" > /dev/null 2>&1; then
+  sleep 1
+else
+  break
+fi
+  done
+}
+
 ## @description  Stop the non-privileged `command` daemon with that
 ## @description  that is running at `pidfile`.
 ## @audience public
@@ -2060,11 +2089,14 @@ function hadoop_stop_daemon
 pid=$(cat "$pidfile")
 
 kill "${pid}" >/dev/null 2>&1
-sleep "${HADOOP_STOP_TIMEOUT}"
+
+wait_process_to_die_or_timeout "${pid}" "${HADOOP_STOP_TIMEOUT}"
+
 if kill -0 "${pid}" > /dev/null 2>&1; then
   hadoop_error "WARNING: ${cmd} did not stop gracefully after 
${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
   kill -9 "${pid}" >/dev/null 2>&1
 fi
+wait_process_to_die_or_timeout "${pid}" "${HADOOP_STOP_TIMEOUT}"
 if ps -p "${pid}" > /dev/null 2>&1; then
   hadoop_error "ERROR: Unable to kill ${pid}"
 else

http://git-wip-us.apache.org/repos/asf/hadoop/blob/108da853/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_stop_daemon.bats
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_stop_daemon.bats 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_stop_daemon.bats
index 023d01c..1483807 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_stop_daemon.bats
+++ 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_stop_daemon.bats
@@ -15,7 +15,7 @@
 
 load hadoop-functions_test_helper
 
-@test "hadoop_stop_daemon" {
+@test "hadoop_stop_daemon_changing_pid" {
   old_pid=12345
   new_pid=54321
   HADOOP_STOP_TIMEOUT=3
@@ -29,3 +29,25 @@ load hadoop-functions_test_helper
   [ -f pidfile ]
   [ "$(cat pidfile)" = "${new_pid}" ]
 }
+
+@test "hadoop_stop_daemon_force_kill" {
+
+  HADOOP_STOP_TIMEOUT=4
+
+  # Run the following in a sub-shell so that its termination doesn't affect 
the test
+  (sh ${TESTBINDIR}/process_with_sigterm_trap.sh ${TMP}/pidfile &)
+
+  # Wait for the process to go into tight loop
+  sleep 1
+
+  [ -f ${TMP}/pidfile ]
+  pid=$(cat "${TMP}/pidfile")
+
+  run hadoop_stop_daemon my_command ${TMP}/pidfile 2>&1
+
+  # The process should no longer be alive
+  ! kill -0 ${pid} > /dev/null 2>&1
+
+  # The PID file should be gone
+  [ ! -f ${TMP}/pidfile ]
+}





[14/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/moment.min.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/moment.min.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/moment.min.js
index 05199bd..9068992 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/moment.min.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/moment.min.js
@@ -1,7 +1,5 @@
 //! moment.js
-//! version : 2.10.3
-//! authors : Tim Wood, Iskren Chernev, Moment.js contributors
+//! version : 2.22.1
 //! license : MIT
 //! momentjs.com
[minified moment.js 2.10.3/2.22.1 source omitted; the mailing-list archive mangled and truncated this hunk]
 

[23/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css
new file mode 100644
index 000..ed3905e
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css
@@ -0,0 +1,6 @@
+/*!
+ * Bootstrap v3.3.7 (http://getbootstrap.com)
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ *//*! normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css 
*/html{font-family:sans-serif;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px
 dotted}b,strong{font-weight:700}dfn{font-style:italic}h1{margin:.67em 
0;font-size:2em}mark{color:#000;background:#ff0}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em
 
40px}hr{height:0;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace,monospace;fo
 
nt-size:1em}button,input,optgroup,select,textarea{margin:0;font:inherit;color:inherit}button{overflow:visible}button,select{text-transform:none}button,html
 
input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer}button[disabled],html
 
input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}input{line-height:normal}input[type=checkbox],input[type=radio]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type=number]::-webkit-inner-spin-button,input[type=number]::-webkit-outer-spin-button{height:auto}input[type=search]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}fieldset{padding:.35em
 .625em .75em;margin:0 2px;border:1px solid 
silver}legend{padding:0;border:0}textarea{overflow:
 
auto}optgroup{font-weight:700}table{border-spacing:0;border-collapse:collapse}td,th{padding:0}/*!
 Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css 
*/@media 
print{*,:after,:before{color:#000!important;text-shadow:none!important;background:0
 
0!important;-webkit-box-shadow:none!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:"
 (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) 
")"}a[href^="javascript:"]:after,a[href^="#"]:after{content:""}blockquote,pre{border:1px
 solid 
#999;page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px
 solid #000}.table{border-collapse:collapse!important}.table td,.table 
th{background-color:#fff!important}.table-bordered td,.table-bordered 
th{border:1px so
 lid #ddd!important}}@font-face{font-family:'Glyphicons 
Halflings';src:url(../fonts/glyphicons-halflings-regular.eot);src:url(../fonts/glyphicons-halflings-regular.eot?#iefix)
 format('embedded-opentype'),url(../fonts/glyphicons-halflings-regular.woff2) 
format('woff2'),url(../fonts/glyphicons-halflings-regular.woff) 
format('woff'),url(../fonts/glyphicons-halflings-regular.ttf) 
format('truetype'),url(../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular)
 
format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons
 
Halflings';font-style:normal;font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-eur:before,.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\
 

[05/50] [abbrv] hadoop git commit: MAPREDUCE-7101. Add config parameter to allow JHS to always scan user dir irrespective of modTime. (Thomas Marquardt via asuresh)

2018-06-14 Thread xkrogen
MAPREDUCE-7101. Add config parameter to allow JHS to always scan user dir 
irrespective of modTime. (Thomas Marquardt via asuresh)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5670e89b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5670e89b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5670e89b

Branch: refs/heads/HDFS-12943
Commit: 5670e89b2ec69ab71e32dcd5acbd3a57ca6abea5
Parents: aeaf9fe
Author: Arun Suresh 
Authored: Tue Jun 12 15:36:52 2018 -0700
Committer: Arun Suresh 
Committed: Tue Jun 12 15:36:52 2018 -0700

--
 .../hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java   | 9 +++--
 .../src/main/resources/mapred-default.xml   | 9 +
 .../apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java   | 8 +++-
 3 files changed, 23 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5670e89b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
index 1cadf84..9e964e1 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
@@ -61,8 +61,13 @@ public class JHAdminConfig {
 MR_HISTORY_PREFIX + "cleaner.interval-ms";
   public static final long DEFAULT_MR_HISTORY_CLEANER_INTERVAL_MS = 
 1 * 24 * 60 * 60 * 1000l; //1 day
-  
-  
+
+  /** Always scan user dir, irrespective of dir modification time.*/
+  public static final String MR_HISTORY_ALWAYS_SCAN_USER_DIR =
+  MR_HISTORY_PREFIX + "always-scan-user-dir";
+  public static final boolean DEFAULT_MR_HISTORY_ALWAYS_SCAN_USER_DIR =
+  false;
+
   /** The number of threads to handle client API requests.*/
   public static final String MR_HISTORY_CLIENT_THREAD_COUNT = 
 MR_HISTORY_PREFIX + "client.thread-count";
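
As a quick illustration of how the new key pair is meant to be consumed (a minimal sketch against the standard Configuration API, not the actual HistoryFileManager change):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;

    public class AlwaysScanFlagExample {
      // Reads the new setting with its shipped default (false).
      public static boolean alwaysScanUserDir(Configuration conf) {
        return conf.getBoolean(
            JHAdminConfig.MR_HISTORY_ALWAYS_SCAN_USER_DIR,
            JHAdminConfig.DEFAULT_MR_HISTORY_ALWAYS_SCAN_USER_DIR);
      }
    }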

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5670e89b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
index dcb312c..9f33d65 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
@@ -1775,6 +1775,15 @@
 
 
 
+  <name>mapreduce.jobhistory.always-scan-user-dir</name>
+  <value>false</value>
+  <description>Some Cloud FileSystems do not currently update the
+  modification time of directories. To support these filesystems, this
+  configuration value should be set to 'true'.
+  </description>
+</property>
+
+<property>
   <name>mapreduce.jobhistory.done-dir</name>
   <value>${yarn.app.mapreduce.am.staging-dir}/history/done</value>
   <description>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5670e89b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
index a07ca26..7fe99a2 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
@@ -324,7 +324,13 @@ public class HistoryFileManager extends AbstractService {
   // so we need to have additional check.
   // Note: modTime (X second Y millisecond) could be casted to X second or
   // X+1 second.
-  if (modTime != newModTime
+  
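
The HistoryFileManager hunk above is cut off by the archive. A hedged sketch of the guard the change implies, with assumed names ('alwaysScanUserDir' standing in for a field populated from the new key at service start):

    // Hypothetical reconstruction, not the committed hunk. Rescan when forced
    // by the new flag, when the directory modTime moved, or when the previous
    // scan landed in the same (or adjacent) second as the recorded modTime,
    // matching the second-granularity caveat in the comment above.
    private boolean shouldRescan(boolean alwaysScanUserDir,
        long modTime, long newModTime, long scanTime) {
      return alwaysScanUserDir
          || modTime != newModTime
          || (scanTime / 1000) == (modTime / 1000)
          || (scanTime / 1000 + 1) == (modTime / 1000);
    }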

[26/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.min.css.map
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.min.css.map
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.min.css.map
new file mode 100644
index 000..94813e9
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.min.css.map
@@ -0,0 +1 @@
+{"version":3,"sources":["less/theme.less","less/mixins/vendor-prefixes.less","less/mixins/gradients.less","less/mixins/reset-filter.less"],"names":[],"mappings":"AAmBA,YAAA,aAAA,UAAA,aAAA,aAAA,aAME,YAAA,EAAA,KAAA,EAAA,eC2CA,mBAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,iBDvCR,mBAAA,mBAAA,oBAAA,oBAAA,iBAAA,iBAAA,oBAAA,oBAAA,oBAAA,oBAAA,oBAAA,oBCsCA,mBAAA,MAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,MAAA,EAAA,IAAA,IAAA,iBDlCR,qBAAA,sBAAA,sBAAA,uBAAA,mBAAA,oBAAA,sBAAA,uBAAA,sBAAA,uBAAA,sBAAA,uBAAA,+BAAA,gCAAA,6BAAA,gCAAA,gCAAA,gCCiCA,mBAAA,KACQ,WAAA,KDlDV,mBAAA,oBAAA,iBAAA,oBAAA,oBAAA,oBAuBI,YAAA,KAyCF,YAAA,YAEE,iBAAA,KAKJ,aErEI,YAAA,EAAA,IAAA,EAAA,KACA,iBAAA,iDACA,iBAAA,4CAAA,iBAAA,qEAEA,iBAAA,+CCnBF,OAAA,+GH4CA,OAAA,0DACA,kBAAA,SAuC2C,aAAA,QAA2B,aAAA,KArCtE,mBAAA,mBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,oBAAA,oBAEE,iBAAA,QACA,aAAA,QAMA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,uBAAA,8BAAA,6BAAA,8BAAA,6BAAA,6BAAA,gCAAA,uCAAA,sCAAA,uCAAA,sCAAA,sCAME,iBAAA,Q
 
ACA,iBAAA,KAgBN,aEtEI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4CA,kBAAA,SACA,aAAA,QAEA,mBAAA,mBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,oBAAA,oBAEE,iBAAA,QACA,aAAA,QAMA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,uBAAA,8BAAA,6BAAA,8BAAA,6BAAA,6BAAA,gCAAA,uCAAA,sCAAA,uCAAA,sCAAA,sCAME,iBAAA,QACA,iBAAA,KAiBN,aEvEI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4CA,kBAAA,SACA,aAAA,QAEA,mBAAA,mBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,oBAAA,oBAEE,iBAAA,QACA,aAAA,QAMA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,uBAAA,8BAAA,6BAAA,8BAAA,6BAAA,6BAAA,gCAAA,uCAAA,sCAAA,uCAAA,sCAAA,sCAME,iBAAA,QACA,iBAAA,KAkBN,UExEI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4CA,kBAAA,SACA,aAAA,QAEA,gBAAA,gBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,iBAAA,iBAEE,iBAAA,QACA,aAAA,QAMA,mBAAA,0BAAA,yBAAA,0BAAA,yBAAA,yBAAA,oBAAA,2BAAA,0BAAA,2BAAA,0BAAA,0BAAA,6BAAA,oCAAA,mCAAA,oCAAA,mCAAA,mCAME,iBAAA,QACA,iBAAA,KAmBN,aEzEI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4
 
CA,kBAAA,SACA,aAAA,QAEA,mBAAA,mBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,oBAAA,oBAEE,iBAAA,QACA,aAAA,QAMA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,uBAAA,8BAAA,6BAAA,8BAAA,6BAAA,6BAAA,gCAAA,uCAAA,sCAAA,uCAAA,sCAAA,sCAME,iBAAA,QACA,iBAAA,KAoBN,YE1EI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4CA,kBAAA,SACA,aAAA,QAEA,kBAAA,kBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,mBAAA,mBAEE,iBAAA,QACA,aAAA,QAMA,qBAAA,4BAAA,2BAAA,4BAAA,2BAAA,2BAAA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,+BAAA,sCAAA,qCAAA,sCAAA,qCAAA,qCAME,iBAAA,QACA,iBAAA,KA2BN,eAAA,WClCE,mBAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,EAAA,IAAA,IAAA,iBD2CV,0BAAA,0BE3FI,iBAAA,QACA,iBAAA,oDACA,iBAAA,+CAAA,iBAAA,wEACA,iBAAA,kDACA,OAAA,+GF0FF,kBAAA,SAEF,yBAAA,+BAAA,+BEhGI,iBAAA,QACA,iBAAA,oDACA,iBAAA,+CAAA,iBAAA,wEACA,iBAAA,kDACA,OAAA,+GFgGF,kBAAA,SASF,gBE7GI,iBAAA,iDACA,iBAAA,4CACA,iBAAA,qEAAA,iBAAA,+CACA,OAAA,+GACA,OAAA,0DCnBF,kBAAA,SH+HA,cAAA,ICjEA,mBAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,iBD6DV,
 
sCAAA,oCE7GI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SD2CF,mBAAA,MAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,MAAA,EAAA,IAAA,IAAA,iBD0EV,cAAA,iBAEE,YAAA,EAAA,IAAA,EAAA,sBAIF,gBEhII,iBAAA,iDACA,iBAAA,4CACA,iBAAA,qEAAA,iBAAA,+CACA,OAAA,+GACA,OAAA,0DCnBF,kBAAA,SHkJA,cAAA,IAHF,sCAAA,oCEhII,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SD2CF,mBAAA,MAAA,EAAA,IAAA,IAAA,gBACQ,WAAA,MAAA,EAAA,IAAA,IAAA,gBDgFV,8BAAA,iCAYI,YAAA,EAAA,KAAA,EAAA,gBAKJ,qBAAA,kBAAA,mBAGE,cAAA,EAqBF,yBAfI,mDAAA,yDAAA,yDAGE,MAAA,KE7JF,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,UFqKJ,OACE,YAAA,EAAA,IAAA,EAAA,qBC3HA,mBAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,gBACQ,WAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,gBDsIV,eEtLI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF8KF,aAAA,QAKF,YEvLI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF8KF,aAAA,QAMF,eExLI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF8KF,aA
 

[43/50] [abbrv] hadoop git commit: YARN-8410. Fixed a bug in A record lookup by CNAME record. Contributed by Shane Kumpf

2018-06-14 Thread xkrogen
YARN-8410.  Fixed a bug in A record lookup by CNAME record.
Contributed by Shane Kumpf


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/95917650
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/95917650
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/95917650

Branch: refs/heads/HDFS-12943
Commit: 9591765040b85927ac69179ab46383eef9560a28
Parents: 8d4926f
Author: Eric Yang 
Authored: Thu Jun 14 15:54:21 2018 -0400
Committer: Eric Yang 
Committed: Thu Jun 14 15:54:21 2018 -0400

--
 .../hadoop/registry/server/dns/RegistryDNS.java | 29 
 .../registry/server/dns/TestRegistryDNS.java| 23 ++--
 2 files changed, 44 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/95917650/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
index 5e994fb..0022843 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
@@ -1126,19 +1126,38 @@ public class RegistryDNS extends AbstractService 
implements DNSOperations,
*/
   private byte remoteLookup(Message response, Name name, int type,
   int iterations) {
+// If retrieving the root zone, query for NS record type
+if (name.toString().equals(".")) {
+  type = Type.NS;
+}
+
+// Always add any CNAMEs to the response first
+if (type != Type.CNAME) {
+  Record[] cnameAnswers = getRecords(name, Type.CNAME);
+  if (cnameAnswers != null) {
+for (Record cnameR : cnameAnswers) {
+  if (!response.findRecord(cnameR)) {
+response.addRecord(cnameR, Section.ANSWER);
+  }
+}
+  }
+}
+
 // Forward lookup to primary DNS servers
 Record[] answers = getRecords(name, type);
 try {
   for (Record r : answers) {
-if (r.getType() == Type.SOA) {
-  response.addRecord(r, Section.AUTHORITY);
-} else {
-  response.addRecord(r, Section.ANSWER);
+if (!response.findRecord(r)) {
+  if (r.getType() == Type.SOA) {
+response.addRecord(r, Section.AUTHORITY);
+  } else {
+response.addRecord(r, Section.ANSWER);
+  }
 }
 if (r.getType() == Type.CNAME) {
   Name cname = ((CNAMERecord) r).getAlias();
   if (iterations < 6) {
-remoteLookup(response, cname, Type.CNAME, iterations + 1);
+remoteLookup(response, cname, type, iterations + 1);
   }
 }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/95917650/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
index 6ba58dd..969faf9 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
@@ -410,7 +410,7 @@ public class TestRegistryDNS extends Assert {
 return recs;
   }
 
-  Record[] assertDNSQueryNotNull(String lookup, int type)
+  Record[] assertDNSQueryNotNull(String lookup, int type, int answerCount)
   throws IOException {
 Name name = Name.fromString(lookup);
 Record question = Record.newRecord(name, type, DClass.IN);
@@ -424,7 +424,7 @@ public class TestRegistryDNS extends Assert {
 assertEquals("Questions do not match", query.getQuestion(),
 response.getQuestion());
 Record[] recs = response.getSectionArray(Section.ANSWER);
-assertEquals(1, recs.length);
+assertEquals(answerCount, recs.length);
 assertEquals(recs[0].getType(), type);
 return recs;
   }
@@ -656,7 +656,24 @@ public class TestRegistryDNS extends Assert {
 
 // start assessing whether correct records are available
 Record[] recs =
-
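
The TestRegistryDNS hunk above is also truncated. For orientation, a hedged example of how the widened helper can be exercised once an A lookup may legitimately return a CNAME plus the target's address record (the lookup name and answer count below are made up, not the committed test data):

    // Illustration only; would live inside TestRegistryDNS.
    @Test
    public void testLookupThroughCname() throws Exception {
      // An A query for a name that is a CNAME should now yield two answers:
      // the CNAME itself followed by the A record of its target.
      assertDNSQueryNotNull("cname-alias.example.com.", Type.A, 2);
    }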

[22/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css.map
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css.map
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css.map
new file mode 100644
index 000..6c7fa40
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css.map
@@ -0,0 +1 @@
+{"version":3,"sources":["less/normalize.less","less/print.less","bootstrap.css","dist/css/bootstrap.css","less/glyphicons.less","less/scaffolding.less","less/mixins/vendor-prefixes.less","less/mixins/tab-focus.less","less/mixins/image.less","less/type.less","less/mixins/text-emphasis.less","less/mixins/background-variant.less","less/mixins/text-overflow.less","less/code.less","less/grid.less","less/mixins/grid.less","less/mixins/grid-framework.less","less/tables.less","less/mixins/table-row.less","less/forms.less","less/mixins/forms.less","less/buttons.less","less/mixins/buttons.less","less/mixins/opacity.less","less/component-animations.less","less/dropdowns.less","less/mixins/nav-divider.less","less/mixins/reset-filter.less","less/button-groups.less","less/mixins/border-radius.less","less/input-groups.less","less/navs.less","less/navbar.less","less/mixins/nav-vertical-align.less","less/utilities.less","less/breadcrumbs.less","less/pagination.less","less/mixins/pagination.less","le
 
ss/pager.less","less/labels.less","less/mixins/labels.less","less/badges.less","less/jumbotron.less","less/thumbnails.less","less/alerts.less","less/mixins/alerts.less","less/progress-bars.less","less/mixins/gradients.less","less/mixins/progress-bar.less","less/media.less","less/list-group.less","less/mixins/list-group.less","less/panels.less","less/mixins/panels.less","less/responsive-embed.less","less/wells.less","less/close.less","less/modals.less","less/tooltip.less","less/mixins/reset-text.less","less/popovers.less","less/carousel.less","less/mixins/clearfix.less","less/mixins/center-block.less","less/mixins/hide-text.less","less/responsive-utilities.less","less/mixins/responsive-visibility.less"],"names":[],"mappings":"4EAQA,KACE,YAAA,WACA,yBAAA,KACA,qBAAA,KAOF,KACE,OAAA,EAaF,QAAA,MAAA,QAAA,WAAA,OAAA,OAAA,OAAA,OAAA,KAAA,KAAA,IAAA,QAAA,QAaE,QAAA,MAQF,MAAA,OAAA,SAAA,MAIE,QAAA,aACA,eAAA,SAQF,sBACE,QAAA,KACA,OAAA,EAQF,SAAA,SAEE,QAAA,KAUF,EACE,iBAAA,YAQF,SAAA,QAEE,QAAA,EAUF,YAC
 
E,cAAA,IAAA,OAOF,EAAA,OAEE,YAAA,IAOF,IACE,WAAA,OAQF,GACE,OAAA,MAAA,EACA,UAAA,IAOF,KACE,MAAA,KACA,WAAA,KAOF,MACE,UAAA,IAOF,IAAA,IAEE,SAAA,SACA,UAAA,IACA,YAAA,EACA,eAAA,SAGF,IACE,IAAA,MAGF,IACE,OAAA,OAUF,IACE,OAAA,EAOF,eACE,SAAA,OAUF,OACE,OAAA,IAAA,KAOF,GACE,OAAA,EAAA,mBAAA,YAAA,gBAAA,YACA,WAAA,YAOF,IACE,SAAA,KAOF,KAAA,IAAA,IAAA,KAIE,YAAA,UAAA,UACA,UAAA,IAkBF,OAAA,MAAA,SAAA,OAAA,SAKE,OAAA,EACA,KAAA,QACA,MAAA,QAOF,OACE,SAAA,QAUF,OAAA,OAEE,eAAA,KAWF,OAAA,wBAAA,kBAAA,mBAIE,mBAAA,OACA,OAAA,QAOF,iBAAA,qBAEE,OAAA,QAOF,yBAAA,wBAEE,QAAA,EACA,OAAA,EAQF,MACE,YAAA,OAWF,qBAAA,kBAEE,mBAAA,WAAA,gBAAA,WAAA,WAAA,WACA,QAAA,EASF,8CAAA,8CAEE,OAAA,KAQF,mBACE,mBAAA,YACA,gBAAA,YAAA,WAAA,YAAA,mBAAA,UASF,iDAAA,8CAEE,mBAAA,KAOF,SACE,QAAA,MAAA,OAAA,MACA,OAAA,EAAA,IACA,OAAA,IAAA,MAAA,OAQF,OACE,QAAA,EACA,OAAA,EAOF,SACE,SAAA,KAQF,SACE,YAAA,IAUF,MACE,eAAA,EACA,gBAAA,SAGF,GAAA,GAEE,QAAA,uFCjUF,aA7FI,EAAA,OAAA,QAGI,MAAA,eACA,YAAA,eACA,WAAA,cAAA,mBAAA,eACA,WAAA,eAGJ,EAAA,UAEI,gBAAA,UAGJ,cACI,QAAA,KAAA,WAAA,IAGJ,kBACI
 
,QAAA,KAAA,YAAA,IAKJ,6BAAA,mBAEI,QAAA,GAGJ,WAAA,IAEI,OAAA,IAAA,MAAA,KC4KL,kBAAA,MDvKK,MC0KL,QAAA,mBDrKK,IE8KN,GDLC,kBAAA,MDrKK,ICwKL,UAAA,eCUD,GF5KM,GE2KN,EF1KM,QAAA,ECuKL,OAAA,ECSD,GF3KM,GCsKL,iBAAA,MD/JK,QCkKL,QAAA,KCSD,YFtKU,oBCiKT,iBAAA,eD7JK,OCgKL,OAAA,IAAA,MAAA,KD5JK,OC+JL,gBAAA,mBCSD,UFpKU,UC+JT,iBAAA,eDzJS,mBEkKV,mBDLC,OAAA,IAAA,MAAA,gBEjPD,WACA,YAAA,uBFsPD,IAAA,+CE7OC,IAAK,sDAAuD,4BAA6B,iDAAkD,gBAAiB,gDAAiD,eAAgB,+CAAgD,mBAAoB,2EAA4E,cAE7W,WACA,SAAA,SACA,IAAA,IACA,QAAA,aACA,YAAA,uBACA,WAAA,OACA,YAAA,IACA,YAAA,EAIkC,uBAAA,YAAW,wBAAA,UACX,2BAAW,QAAA,QAEX,uBDuPlC,QAAS,QCtPyB,sBFiPnC,uBEjP8C,QAAA,QACX,wBAAW,QAAA,QACX,wBAAW,QAAA,QACX,2BAAW,QAAA,QACX,yBAAW,QAAA,QACX,wBAAW,QAAA,QACX,wBAAW,QAAA,QACX,yBAAW,QAAA,QACX,wBAAW,QAAA,QACX,uBAAW,QAAA,QACX,6BAAW,QAAA,QACX,uBAAW,QAAA,QACX,uBAAW,QAAA,QACX,2BAAW,QAAA,QACX,qBAAW,QAAA,QACX,0BAAW,QAAA,QACX,qBAAW,QAAA,QACX,yBAAW,QAAA,QACX,0BAAW,QAAA,QACX,2BAAW,QAAA,QACX,sBAAW,QAAA,QACX,yBAAW,QAAA,QACX,sBAAW,QAAA,QACX,wBAAW,QAAA,QACX,uBAAW,QAAA,QACX
 

[33/50] [abbrv] hadoop git commit: YARN-8404. Timeline event publish needs to be async to avoid Dispatcher thread leak in case ATS is down. Contributed by Rohith Sharma K S

2018-06-14 Thread xkrogen
YARN-8404. Timeline event publish needs to be async to avoid Dispatcher thread 
leak in case ATS is down. Contributed by Rohith Sharma K S


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6307962b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6307962b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6307962b

Branch: refs/heads/HDFS-12943
Commit: 6307962b932e0ee69ba61f5796388c175d79195a
Parents: f4c7c91
Author: Sunil G 
Authored: Wed Jun 13 16:09:16 2018 +0530
Committer: Sunil G 
Committed: Wed Jun 13 16:09:16 2018 +0530

--
 .../resourcemanager/metrics/TimelineServiceV1Publisher.java| 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6307962b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
index 31b07f1..dc5292b 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
@@ -153,9 +153,9 @@ public class TimelineServiceV1Publisher extends 
AbstractSystemMetricsPublisher {
 tEvent.setEventInfo(eventInfo);
 
 entity.addEvent(tEvent);
-// sync sending of finish event to avoid possibility of saving application
-// finished state in RMStateStore save without publishing in ATS.
-putEntity(entity); // sync event so that ATS update is done without fail.
+
+getDispatcher().getEventHandler().handle(new TimelineV1PublishEvent(
+SystemMetricsEventType.PUBLISH_ENTITY, entity, 
app.getApplicationId()));
   }
 
   @SuppressWarnings("unchecked")
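
The point of the change is that the ResourceManager thread must never block on ATS: the application-finished entity is now queued on the publisher's dispatcher like every other event instead of being written synchronously. A generic sketch of that pattern (illustration only, using a plain executor rather than YARN's dispatcher classes):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    class AsyncEntityPublisher {
      // Single worker so events are published in order without blocking callers.
      private final ExecutorService pool = Executors.newSingleThreadExecutor();

      void publish(Object entity) {
        pool.submit(() -> {
          try {
            putEntity(entity);   // the potentially slow remote timeline call
          } catch (Exception e) {
            // log and drop; the caller has already moved on
          }
        });
      }

      private void putEntity(Object entity) { /* remote ATS write goes here */ }
    }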





[39/50] [abbrv] hadoop git commit: HDDS-161. Add functionality to queue ContainerClose command from SCM Heartbeat Response to Ratis. Contributed by Shashikant Banerjee.

2018-06-14 Thread xkrogen
HDDS-161. Add functionality to queue ContainerClose command from SCM Heartbeat 
Response to Ratis.
Contributed by Shashikant Banerjee.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7547740e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7547740e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7547740e

Branch: refs/heads/HDFS-12943
Commit: 7547740e5c65edaa6c6f8aa1c8debabbdfb0945e
Parents: 2299488
Author: Anu Engineer 
Authored: Wed Jun 13 17:50:42 2018 -0700
Committer: Anu Engineer 
Committed: Wed Jun 13 18:48:59 2018 -0700

--
 .../statemachine/DatanodeStateMachine.java  |   9 +
 .../CloseContainerCommandHandler.java   |  21 +-
 .../commandhandler/CommandDispatcher.java   |   4 +
 .../common/transport/server/XceiverServer.java  |   7 +
 .../transport/server/XceiverServerGrpc.java |   9 +
 .../transport/server/XceiverServerSpi.java  |   7 +
 .../server/ratis/XceiverServerRatis.java|  56 -
 .../container/ozoneimpl/OzoneContainer.java |  62 +-
 .../commands/CloseContainerCommand.java |  12 +-
 .../StorageContainerDatanodeProtocol.proto  |   1 +
 .../container/CloseContainerEventHandler.java   |   3 +-
 .../scm/container/closer/ContainerCloser.java   |   7 +-
 .../TestCloseContainerByPipeline.java   | 221 +++
 .../TestCloseContainerHandler.java  |   7 +-
 14 files changed, 412 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7547740e/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
--
diff --git 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
index cb4319d..dc4e673 100644
--- 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
+++ 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
@@ -403,4 +403,13 @@ public class DatanodeStateMachine implements Closeable {
   public long getCommandHandled() {
 return commandsHandled;
   }
+
+  /**
+   * returns the Command Dispatcher.
+   * @return CommandDispatcher
+   */
+  @VisibleForTesting
+  public CommandDispatcher getCommandDispatcher() {
+return commandDispatcher;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7547740e/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerCommandHandler.java
--
diff --git 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerCommandHandler.java
 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerCommandHandler.java
index e8c602d..45f2bbd 100644
--- 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerCommandHandler.java
+++ 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerCommandHandler.java
@@ -16,6 +16,8 @@
  */
 package org.apache.hadoop.ozone.container.common.statemachine.commandhandler;
 
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.protocol.proto
 .StorageContainerDatanodeProtocolProtos.SCMCommandProto;
 import org.apache.hadoop.hdds.protocol.proto
@@ -29,6 +31,8 @@ import org.apache.hadoop.util.Time;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.UUID;
+
 /**
  * Handler for close container command received from SCM.
  */
@@ -67,8 +71,23 @@ public class CloseContainerCommandHandler implements 
CommandHandler {
   CloseContainerCommandProto
   .parseFrom(command.getProtoBufMessage());
   containerID = closeContainerProto.getContainerID();
+  HddsProtos.ReplicationType replicationType =
+  closeContainerProto.getReplicationType();
+
+  ContainerProtos.CloseContainerRequestProto.Builder closeRequest =
+  ContainerProtos.CloseContainerRequestProto.newBuilder();
+  closeRequest.setContainerID(containerID);
 
-  container.getContainerManager().closeContainer(containerID);
+  
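
The handler hunk is truncated above. A rough sketch of the remaining step it implies: wrap the close request in a container command and hand it to the server that matches the pipeline's replication type (Ratis or standalone). The builder field names and the submit call below are assumptions, not the committed API:

    // Hypothetical continuation of the truncated handler code above.
    ContainerProtos.ContainerCommandRequestProto request =
        ContainerProtos.ContainerCommandRequestProto.newBuilder()
            .setCmdType(ContainerProtos.Type.CloseContainer)   // assumed field
            .setCloseContainer(closeRequest)                    // assumed field
            .build();
    // Assumed routing hook on OzoneContainer keyed by replication type.
    ozoneContainer.submitContainerRequest(request, replicationType);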

[10/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-1.8.2.min.js
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-1.8.2.min.js
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-1.8.2.min.js
deleted file mode 100644
index bc3fbc8..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-1.8.2.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-/*! jQuery v1.8.2 jquery.com | jquery.org/license */
-[minified jQuery 1.8.2 source omitted]

[38/50] [abbrv] hadoop git commit: YARN-8259. Improve privileged docker container liveliness checks. Contributed by Shane Kumpf

2018-06-14 Thread xkrogen
YARN-8259.  Improve privileged docker container liveliness checks.
Contributed by Shane Kumpf


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/22994889
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/22994889
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/22994889

Branch: refs/heads/HDFS-12943
Commit: 22994889dc449f966fb6462a3ac3d3bbaee3ac6a
Parents: 69b0596
Author: Eric Yang 
Authored: Wed Jun 13 19:24:31 2018 -0400
Committer: Eric Yang 
Committed: Wed Jun 13 19:24:31 2018 -0400

--
 .../runtime/DockerLinuxContainerRuntime.java| 26 ++-
 .../runtime/LinuxContainerRuntimeConstants.java |  2 ++
 .../runtime/TestDockerContainerRuntime.java | 34 +---
 .../src/site/markdown/DockerContainers.md   | 15 +
 4 files changed, 49 insertions(+), 28 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/22994889/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DockerLinuxContainerRuntime.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DockerLinuxContainerRuntime.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DockerLinuxContainerRuntime.java
index e19379f..f13ba59 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DockerLinuxContainerRuntime.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DockerLinuxContainerRuntime.java
@@ -191,6 +191,7 @@ public class DockerLinuxContainerRuntime implements 
LinuxContainerRuntime {
   private static final Pattern USER_MOUNT_PATTERN = Pattern.compile(
   "(?<=^|,)([^:\\x00]+):([^:\\x00]+):([a-z]+)");
   private static final int HOST_NAME_LENGTH = 64;
+  private static final String DEFAULT_PROCFS = "/proc";
 
   @InterfaceAudience.Private
   public static final String ENV_DOCKER_CONTAINER_IMAGE =
@@ -1192,24 +1193,15 @@ public class DockerLinuxContainerRuntime implements 
LinuxContainerRuntime {
 
   private void executeLivelinessCheck(ContainerRuntimeContext ctx)
   throws ContainerExecutionException {
-PrivilegedOperation signalOp = new PrivilegedOperation(
-PrivilegedOperation.OperationType.SIGNAL_CONTAINER);
-signalOp.appendArgs(ctx.getExecutionAttribute(RUN_AS_USER),
-ctx.getExecutionAttribute(USER), Integer.toString(
-PrivilegedOperation.RunAsUserCommand.SIGNAL_CONTAINER.getValue()),
-ctx.getExecutionAttribute(PID),
-Integer.toString(ctx.getExecutionAttribute(SIGNAL).getValue()));
-signalOp.disableFailureLogging();
-try {
-  privilegedOperationExecutor.executePrivilegedOperation(null, signalOp,
-  null, ctx.getContainer().getLaunchContext().getEnvironment(), false,
-  false);
-} catch (PrivilegedOperationException e) {
-  String msg = "Liveliness check failed for PID: "
-  + ctx.getExecutionAttribute(PID)
+String procFs = ctx.getExecutionAttribute(PROCFS);
+if (procFs == null || procFs.isEmpty()) {
+  procFs = DEFAULT_PROCFS;
+}
+String pid = ctx.getExecutionAttribute(PID);
+if (!new File(procFs + File.separator + pid).exists()) {
+  String msg = "Liveliness check failed for PID: " + pid
   + ". Container may have already completed.";
-  throw new ContainerExecutionException(msg, e.getExitCode(), 
e.getOutput(),
-  e.getErrorOutput());
+  throw new ContainerExecutionException(msg);
 }
   }
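
The design point is that looking for /proc/<pid> needs no privileges, whereas signalling the PID of a privileged container from the NodeManager can fail even though the container is healthy. A minimal standalone sketch of the same probe (class and method names are illustrative):

    import java.io.File;

    final class ProcfsLiveness {
      private ProcfsLiveness() { }

      // True if a process directory exists under the given procfs root;
      // falls back to /proc when no root is configured.
      static boolean isAlive(String procFsRoot, String pid) {
        String root = (procFsRoot == null || procFsRoot.isEmpty())
            ? "/proc" : procFsRoot;
        return new File(root, pid).exists();
      }
    }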
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/22994889/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/LinuxContainerRuntimeConstants.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/LinuxContainerRuntimeConstants.java
 

[19/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.ttf
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.ttf
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.ttf
new file mode 100644
index 000..1413fc6
Binary files /dev/null and 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.ttf
 differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff
new file mode 100644
index 000..9e61285
Binary files /dev/null and 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff
 differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff2
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff2
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff2
new file mode 100644
index 000..64539b5
Binary files /dev/null and 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff2
 differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap-editable.min.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap-editable.min.js
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap-editable.min.js
new file mode 100644
index 000..539d6c1
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap-editable.min.js
@@ -0,0 +1,7 @@
+/*! X-editable - v1.5.0
+* In-place editing with Twitter Bootstrap, jQuery UI or pure jQuery
+* http://github.com/vitalets/x-editable
+* Copyright (c) 2013 Vitaliy Potapov; Licensed MIT */
+[minified bootstrap-editable.min.js v1.5.0 source omitted]

[47/50] [abbrv] hadoop git commit: HDFS-13675. Speed up TestDFSAdminWithHA. Contributed by Lukas Majercak.

2018-06-14 Thread xkrogen
HDFS-13675. Speed up TestDFSAdminWithHA. Contributed by Lukas Majercak.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5d7449d2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5d7449d2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5d7449d2

Branch: refs/heads/HDFS-12943
Commit: 5d7449d2b8bcd0963d172fc30df784279671176f
Parents: 361ffb2
Author: Inigo Goiri 
Authored: Thu Jun 14 13:43:14 2018 -0700
Committer: Inigo Goiri 
Committed: Thu Jun 14 13:43:14 2018 -0700

--
 .../java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java   | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d7449d2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
index c6139c1..b85a8d8 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.qjournal.MiniQJMHACluster;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.hdfs.server.namenode.ha.BootstrapStandby;
@@ -104,6 +105,10 @@ public class TestDFSAdminWithHA {
 conf.setInt(
 CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY,
 500);
+conf.setInt(HdfsClientConfigKeys.Failover.MAX_ATTEMPTS_KEY, 2);
+conf.setInt(HdfsClientConfigKeys.Retry.MAX_ATTEMPTS_KEY, 2);
+conf.setInt(HdfsClientConfigKeys.Failover.SLEEPTIME_BASE_KEY, 0);
+conf.setInt(HdfsClientConfigKeys.Failover.SLEEPTIME_MAX_KEY, 0);
   }
 
   @After





[32/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, 
Mukul Kumar Singh and Sunil Govindan.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f4c7c911
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f4c7c911
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f4c7c911

Branch: refs/heads/HDFS-12943
Commit: f4c7c91123b1dbb12bcc007047963b04ad46
Parents: 29024a6
Author: Jitendra Pandey 
Authored: Wed Jun 13 00:36:02 2018 -0700
Committer: Jitendra Pandey 
Committed: Wed Jun 13 00:36:02 2018 -0700

--
 LICENSE.txt | 4 +-
 .../server-scm/src/main/webapps/scm/index.html  | 6 +-
 .../main/webapps/router/federationhealth.html   | 6 +-
 hadoop-hdfs-project/hadoop-hdfs/pom.xml | 4 +-
 .../src/main/webapps/datanode/datanode.html | 6 +-
 .../src/main/webapps/hdfs/dfshealth.html| 6 +-
 .../src/main/webapps/hdfs/dfshealth.js  | 8 +-
 .../src/main/webapps/hdfs/explorer.html |10 +-
 .../src/main/webapps/hdfs/explorer.js   |34 +-
 .../src/main/webapps/journal/index.html | 6 +-
 .../src/main/webapps/secondary/status.html  | 6 +-
 .../bootstrap-3.0.2/css/bootstrap-editable.css  |   655 -
 .../bootstrap-3.0.2/css/bootstrap.min.css   | 9 -
 .../fonts/glyphicons-halflings-regular.eot  |   Bin 20290 -> 0 bytes
 .../fonts/glyphicons-halflings-regular.svg  |   229 -
 .../fonts/glyphicons-halflings-regular.ttf  |   Bin 41236 -> 0 bytes
 .../fonts/glyphicons-halflings-regular.woff |   Bin 23292 -> 0 bytes
 .../static/bootstrap-3.0.2/img/clear.png|   Bin 509 -> 0 bytes
 .../static/bootstrap-3.0.2/img/loading.gif  |   Bin 1849 -> 0 bytes
 .../js/bootstrap-editable.min.js| 7 -
 .../static/bootstrap-3.0.2/js/bootstrap.min.js  | 9 -
 .../bootstrap-3.3.7/css/bootstrap-editable.css  |   655 +
 .../bootstrap-3.3.7/css/bootstrap-theme.css |   587 +
 .../bootstrap-3.3.7/css/bootstrap-theme.css.map | 1 +
 .../bootstrap-3.3.7/css/bootstrap-theme.min.css | 6 +
 .../css/bootstrap-theme.min.css.map | 1 +
 .../static/bootstrap-3.3.7/css/bootstrap.css|  6757 
 .../bootstrap-3.3.7/css/bootstrap.css.map   | 1 +
 .../bootstrap-3.3.7/css/bootstrap.min.css   | 6 +
 .../bootstrap-3.3.7/css/bootstrap.min.css.map   | 1 +
 .../fonts/glyphicons-halflings-regular.eot  |   Bin 0 -> 20127 bytes
 .../fonts/glyphicons-halflings-regular.svg  |   288 +
 .../fonts/glyphicons-halflings-regular.ttf  |   Bin 0 -> 45404 bytes
 .../fonts/glyphicons-halflings-regular.woff |   Bin 0 -> 23424 bytes
 .../fonts/glyphicons-halflings-regular.woff2|   Bin 0 -> 18028 bytes
 .../js/bootstrap-editable.min.js| 7 +
 .../static/bootstrap-3.3.7/js/bootstrap.js  |  2377 +++
 .../static/bootstrap-3.3.7/js/bootstrap.min.js  | 7 +
 .../webapps/static/bootstrap-3.3.7/js/npm.js|13 +
 .../src/main/webapps/static/dfs-dust.js | 2 +-
 .../main/webapps/static/jquery-1.10.2.min.js| 6 -
 .../src/main/webapps/static/jquery-3.3.1.min.js | 2 +
 .../src/main/webapps/static/moment.min.js   | 6 +-
 .../test/robotframework/acceptance/ozone.robot  | 4 +-
 .../src/main/webapps/ksm/index.html | 6 +-
 hadoop-ozone/pom.xml| 4 +-
 .../src/main/html/js/thirdparty/jquery.js   | 13607 +
 .../hadoop-yarn/hadoop-yarn-common/pom.xml  | 2 +-
 .../hadoop/yarn/webapp/view/JQueryUI.java   | 2 +-
 .../webapps/static/jquery/jquery-1.8.2.min.js   | 2 -
 .../webapps/static/jquery/jquery-3.3.1.min.js   | 2 +
 .../webapps/static/jt/jquery.jstree.js  |42 +-
 52 files changed, 17883 insertions(+), 7516 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/LICENSE.txt
--
diff --git a/LICENSE.txt b/LICENSE.txt
index 75c5562..f8de86a 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -617,7 +617,7 @@ OTHER DEALINGS IN THE SOFTWARE.
 
 The binary distribution of this product bundles these dependencies under the
 following license:
-hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2
+hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7
 hadoop-tools/hadoop-sls/src/main/html/js/thirdparty/bootstrap.min.js
 hadoop-tools/hadoop-sls/src/main/html/css/bootstrap.min.css
 hadoop-tools/hadoop-sls/src/main/html/css/bootstrap-responsive.min.css
@@ -761,7 +761,7 @@ THE SOFTWARE.
 
 
 For:
-hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-1.10.2.min.js

[35/50] [abbrv] hadoop git commit: HDDS-109. Add reconnect logic for XceiverClientGrpc. Contributed by Lokesh Jain.

2018-06-14 Thread xkrogen
HDDS-109. Add reconnect logic for XceiverClientGrpc.
Contributed by Lokesh Jain.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/43baa036
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/43baa036
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/43baa036

Branch: refs/heads/HDFS-12943
Commit: 43baa036aeb025bcbed1aca19837b072f2c14a6a
Parents: 8e7548d
Author: Anu Engineer 
Authored: Wed Jun 13 09:50:10 2018 -0700
Committer: Anu Engineer 
Committed: Wed Jun 13 09:50:10 2018 -0700

--
 .../hadoop/hdds/scm/XceiverClientGrpc.java  | 23 
 .../ozone/scm/TestXceiverClientManager.java |  6 ++--
 .../hadoop/ozone/web/client/TestKeys.java   | 29 
 3 files changed, 48 insertions(+), 10 deletions(-)
--
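
Before the diff, a minimal sketch of the guard this patch adds around sendCommandAsync: refuse once the client has been closed explicitly, otherwise lazily re-establish the channel before sending. This is a generic illustration; connect() and isConnected() stand in for the gRPC channel management in the code below:

    import java.io.IOException;

    abstract class ReconnectingClient {
      private volatile boolean closed = false;

      void close() {
        closed = true;             // further sends must fail, never reconnect
      }

      protected void ensureConnected() throws IOException {
        if (closed) {
          throw new IOException("This channel is not connected.");
        }
        if (!isConnected()) {
          try {
            connect();             // lazy reconnect on the send path
          } catch (Exception e) {
            throw new IOException("Error while connecting", e);
          }
          if (!isConnected()) {
            throw new IOException("This channel is not connected.");
          }
        }
      }

      protected abstract boolean isConnected();
      protected abstract void connect() throws Exception;
    }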


http://git-wip-us.apache.org/repos/asf/hadoop/blob/43baa036/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java
--
diff --git 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java
 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java
index c787024..92df46e 100644
--- 
a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java
+++ 
b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/XceiverClientGrpc.java
@@ -55,6 +55,7 @@ public class XceiverClientGrpc extends XceiverClientSpi {
   private XceiverClientMetrics metrics;
   private ManagedChannel channel;
   private final Semaphore semaphore;
+  private boolean closed = false;
 
   /**
* Constructs a client that can communicate with the Container framework on
@@ -105,6 +106,7 @@ public class XceiverClientGrpc extends XceiverClientSpi {
 
   @Override
   public void close() {
+closed = true;
 channel.shutdownNow();
 try {
   channel.awaitTermination(60, TimeUnit.MINUTES);
@@ -153,6 +155,14 @@ public class XceiverClientGrpc extends XceiverClientSpi {
  public CompletableFuture<ContainerCommandResponseProto>
   sendCommandAsync(ContainerCommandRequestProto request)
   throws IOException, ExecutionException, InterruptedException {
+if(closed){
+  throw new IOException("This channel is not connected.");
+}
+
+if(channel == null || !isConnected()) {
+  reconnect();
+}
+
 final CompletableFuture<ContainerCommandResponseProto> replyFuture =
 new CompletableFuture<>();
 semaphore.acquire();
@@ -192,6 +202,19 @@ public class XceiverClientGrpc extends XceiverClientSpi {
 return replyFuture;
   }
 
+  private void reconnect() throws IOException {
+try {
+  connect();
+} catch (Exception e) {
+  LOG.error("Error while connecting: ", e);
+  throw new IOException(e);
+}
+
+if (channel == null || !isConnected()) {
+  throw new IOException("This channel is not connected.");
+}
+  }
+
   /**
* Create a pipeline.
*

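The change above is a lazy-reconnect guard: a client that was explicitly closed must fail fast, while a client whose channel merely dropped is reconnected transparently before the next call. A minimal, self-contained sketch of the same pattern follows; SimpleClient, connect() and isConnected() are illustrative stand-ins, not the actual XceiverClientGrpc internals.

import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;

// Illustrative sketch of the closed/reconnect guard; the channel handling is
// simulated with a boolean instead of a real gRPC ManagedChannel.
public class SimpleClient {
  private final AtomicBoolean closed = new AtomicBoolean(false);
  private volatile boolean connected = false;

  private void connect() {
    // assumption: this would open the underlying channel
    connected = true;
  }

  private boolean isConnected() {
    return connected;
  }

  private void reconnect() throws IOException {
    try {
      connect();
    } catch (Exception e) {
      throw new IOException("Error while connecting", e);
    }
    if (!isConnected()) {
      throw new IOException("This channel is not connected.");
    }
  }

  public String send(String request) throws IOException {
    if (closed.get()) {
      // an explicitly closed client never reopens its channel
      throw new IOException("This channel is not connected.");
    }
    if (!isConnected()) {
      // lazy reconnect before each call
      reconnect();
    }
    return "ok: " + request;
  }

  public void close() {
    closed.set(true);
    connected = false;
  }
}

The closed flag is what makes the test change below possible: once close() has been called, every transport reports the same "This channel is not connected." message.
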
http://git-wip-us.apache.org/repos/asf/hadoop/blob/43baa036/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestXceiverClientManager.java
--
diff --git 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestXceiverClientManager.java
 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestXceiverClientManager.java
index 478cf69..56f3c7a 100644
--- 
a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestXceiverClientManager.java
+++ 
b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/scm/TestXceiverClientManager.java
@@ -163,8 +163,7 @@ public class TestXceiverClientManager {
 // and any container operations should fail
 clientManager.releaseClient(client1);
 
-String expectedMessage = shouldUseGrpc ? "Channel shutdown invoked" :
-"This channel is not connected.";
+String expectedMessage = "This channel is not connected.";
 try {
   ContainerProtocolCalls.createContainer(client1,
   container1.getContainerID(), traceID1);
@@ -213,8 +212,7 @@ public class TestXceiverClientManager {
 
 // Any container operation should now fail
 String traceID2 = "trace" + RandomStringUtils.randomNumeric(4);
-String expectedMessage = shouldUseGrpc ? "Channel shutdown invoked" :
-"This channel is not connected.";
+String expectedMessage = "This channel is not connected.";
 try {
   ContainerProtocolCalls.createContainer(client1,
   container1.getContainerID(), traceID2);

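For reference, the try/fail/catch shape of these assertions, reduced to plain JUnit 4; the helper method is a hypothetical stand-in for a container call on a released client, not the real test code.

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import org.junit.Test;

public class ClosedClientMessageTest {

  // hypothetical stand-in for a container call made on a released client
  private void createContainerOnReleasedClient() throws IOException {
    throw new IOException("This channel is not connected.");
  }

  @Test
  public void operationOnReleasedClientFails() {
    String expectedMessage = "This channel is not connected.";
    try {
      createContainerOnReleasedClient();
      fail("Expected the call on a released client to fail");
    } catch (IOException e) {
      // the expected message no longer depends on the transport in use
      assertTrue(e.getMessage().contains(expectedMessage));
    }
  }
}
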
http://git-wip-us.apache.org/repos/asf/hadoop/blob/43baa036/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/web/client/TestKeys.java
--
diff --git 

[36/50] [abbrv] hadoop git commit: HDDS-159. RestClient: Implement list operations for volume, bucket and keys. Contributed by Lokesh Jain.

2018-06-14 Thread xkrogen
HDDS-159. RestClient: Implement list operations for volume, bucket and keys. 
Contributed by Lokesh Jain.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7566e0ec
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7566e0ec
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7566e0ec

Branch: refs/heads/HDFS-12943
Commit: 7566e0ec5f1aff4cf3c53f4ccc5f3b57fff1e216
Parents: 43baa03
Author: Xiaoyu Yao 
Authored: Wed Jun 13 11:43:18 2018 -0700
Committer: Xiaoyu Yao 
Committed: Wed Jun 13 11:43:18 2018 -0700

--
 .../robotframework/acceptance/ozone-shell.robot |  18 +--
 .../hadoop/ozone/client/rest/RestClient.java| 113 ++-
 .../hadoop/ozone/ozShell/TestOzoneShell.java|  33 +++---
 .../hadoop/ozone/web/client/TestBuckets.java|   3 -
 .../ozone/web/client/TestBucketsRatis.java  |   3 -
 .../hadoop/ozone/web/client/TestVolume.java |  12 +-
 6 files changed, 136 insertions(+), 46 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7566e0ec/hadoop-ozone/acceptance-test/src/test/robotframework/acceptance/ozone-shell.robot
--
diff --git 
a/hadoop-ozone/acceptance-test/src/test/robotframework/acceptance/ozone-shell.robot
 
b/hadoop-ozone/acceptance-test/src/test/robotframework/acceptance/ozone-shell.robot
index 7ff4910..1a91a93 100644
--- 
a/hadoop-ozone/acceptance-test/src/test/robotframework/acceptance/ozone-shell.robot
+++ 
b/hadoop-ozone/acceptance-test/src/test/robotframework/acceptance/ozone-shell.robot
@@ -38,7 +38,7 @@ Scale it up to 5 datanodes
 
 Test ozone shell (RestClient without http port)
 Execute on  datanodeozone oz -createVolume 
http://ksm/hive -user bilbo -quota 100TB -root
-${result} = Execute on  datanodeozone oz -listVolume 
o3://ksm -user bilbo | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r 
'.[] | select(.volumeName=="hive")'
+${result} = Execute on  datanodeozone oz -listVolume 
http://ksm -user bilbo | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r 
'.[] | select(.volumeName=="hive")'
 Should contain  ${result}   createdOn
 Execute on  datanodeozone oz -updateVolume 
http://ksm/hive -user bill -quota 10TB
 ${result} = Execute on  datanodeozone oz -infoVolume 
http://ksm/hive | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r '. | 
select(.volumeName=="hive") | .owner | .name'
@@ -52,7 +52,7 @@ Test ozone shell (RestClient without http port)
 Should Be Equal ${result}   GROUP
 ${result} = Execute on  datanodeozone oz -updateBucket 
http://ksm/hive/bb1 -removeAcl group:samwise:r | grep -Ev 
'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r '. | select(.bucketName=="bb1") | 
.acls | .[] | select(.name=="frodo") | .type'
 Should Be Equal ${result}   USER
-${result} = Execute on  datanodeozone oz -listBucket 
o3://ksm/hive/ | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r '.[] | 
select(.bucketName=="bb1") | .volumeName'
+${result} = Execute on  datanodeozone oz -listBucket 
http://ksm/hive/ | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r '.[] 
| select(.bucketName=="bb1") | .volumeName'
 Should Be Equal ${result}   hive
 Execute on  datanodeozone oz -putKey 
http://ksm/hive/bb1/key1 -file NOTICE.txt
 Execute on  datanoderm -f NOTICE.txt.1
@@ -60,7 +60,7 @@ Test ozone shell (RestClient without http port)
 Execute on  datanodels -l NOTICE.txt.1
 ${result} = Execute on  datanodeozone oz -infoKey 
http://ksm/hive/bb1/key1 | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq 
-r '. | select(.keyName=="key1")'
 Should contain  ${result}   createdOn
-${result} = Execute on  datanodeozone oz -listKey 
o3://ksm/hive/bb1 | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r '.[] 
| select(.keyName=="key1") | .keyName'
+${result} = Execute on  datanodeozone oz -listKey 
http://ksm/hive/bb1 | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r 
'.[] | select(.keyName=="key1") | .keyName'
 Should Be Equal ${result}   key1
 Execute on  datanodeozone oz -deleteKey 
http://ksm/hive/bb1/key1 -v
 Execute on  datanodeozone oz -deleteBucket 
http://ksm/hive/bb1
@@ -68,7 +68,7 @@ Test ozone shell (RestClient 

[27/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.css.map
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.css.map
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.css.map
new file mode 100644
index 000..d876f60
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.css.map
@@ -0,0 +1 @@
+[... one-line minified source map for bootstrap-theme.css.map omitted ...]
 

[01/50] [abbrv] hadoop git commit: YARN-8422. TestAMSimulator failing with NPE. Contributed by Giovanni Matteo Fumarola.

2018-06-14 Thread xkrogen
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-12943 9756c2cbe -> 292ccdce8


YARN-8422. TestAMSimulator failing with NPE. Contributed by Giovanni Matteo 
Fumarola.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c3548159
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c3548159
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c3548159

Branch: refs/heads/HDFS-12943
Commit: c35481594ffc372e3f846b0c8ebc2ff9e36ffdb0
Parents: 6e756e8
Author: Inigo Goiri 
Authored: Tue Jun 12 10:59:50 2018 -0700
Committer: Inigo Goiri 
Committed: Tue Jun 12 10:59:50 2018 -0700

--
 .../hadoop/yarn/sls/appmaster/TestAMSimulator.java | 13 -
 1 file changed, 8 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c3548159/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java
--
diff --git 
a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java
 
b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java
index bfc7d0c..bc8ea70 100644
--- 
a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java
+++ 
b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java
@@ -49,8 +49,8 @@ public class TestAMSimulator {
   private YarnConfiguration conf;
   private Path metricOutputDir;
 
-  private Class slsScheduler;
-  private Class scheduler;
+  private Class slsScheduler;
+  private Class scheduler;
 
   @Parameterized.Parameters
   public static Collection params() {
@@ -60,7 +60,7 @@ public class TestAMSimulator {
 });
   }
 
-  public TestAMSimulator(Class slsScheduler, Class scheduler) {
+  public TestAMSimulator(Class slsScheduler, Class scheduler) {
 this.slsScheduler = slsScheduler;
 this.scheduler = scheduler;
   }
@@ -115,7 +115,8 @@ public class TestAMSimulator {
   }
 
   private void createMetricOutputDir() {
-Path testDir = Paths.get(System.getProperty("test.build.data"));
+Path testDir =
+Paths.get(System.getProperty("test.build.data", "target/test-dir"));
 try {
   metricOutputDir = Files.createTempDirectory(testDir, "output");
 } catch (IOException e) {
@@ -153,7 +154,9 @@ public class TestAMSimulator {
 
   @After
   public void tearDown() {
-rm.stop();
+if (rm != null) {
+  rm.stop();
+}
 
 deleteMetricOutputDir();
   }

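Both hunks apply the same defensive-test idea: fall back to a default directory when test.build.data is not set (for example when the test is launched from an IDE), and only stop what was actually started. A short sketch under those assumptions, independent of the SLS classes:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class DefensiveSetupTest {

  private Path metricOutputDir;
  private AutoCloseable resource;  // stands in for the ResourceManager

  @Before
  public void setup() throws IOException {
    // fall back to target/test-dir when the property is not set by the build
    Path testDir =
        Paths.get(System.getProperty("test.build.data", "target/test-dir"));
    Files.createDirectories(testDir);
    metricOutputDir = Files.createTempDirectory(testDir, "output");
  }

  @Test
  public void testSomething() {
    // test body elided
  }

  @After
  public void tearDown() throws Exception {
    if (resource != null) {
      // guard against setup failing before the resource was started
      resource.close();
    }
    if (metricOutputDir != null) {
      Files.deleteIfExists(metricOutputDir);
    }
  }
}
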




[13/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-ozone/pom.xml
--
diff --git a/hadoop-ozone/pom.xml b/hadoop-ozone/pom.xml
index 5d57e10..cffef14 100644
--- a/hadoop-ozone/pom.xml
+++ b/hadoop-ozone/pom.xml
@@ -143,8 +143,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd;>
 src/main/webapps/router/robots.txt
 src/contrib/**
 src/site/resources/images/*
-webapps/static/bootstrap-3.0.2/**
-webapps/static/jquery-1.10.2.min.js
+webapps/static/bootstrap-3.3.7/**
+webapps/static/jquery-3.3.1.min.js
 webapps/static/jquery.dataTables.min.js
 webapps/static/nvd3-1.8.5.min.css.map
 webapps/static/nvd3-1.8.5.min.js





[24/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css.map
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css.map
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css.map
new file mode 100644
index 000..f010c82
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css.map
@@ -0,0 +1 @@
+[... one-line minified source map for bootstrap.css.map omitted ...]
 

[30/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.eot
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.eot
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.eot
deleted file mode 100644
index 423bd5d..000
Binary files 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.eot
 and /dev/null differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.svg
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.svg
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.svg
deleted file mode 100644
index 4469488..000
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.svg
+++ /dev/null
@@ -1,229 +0,0 @@
-[... 229 lines of glyphicons-halflings-regular.svg glyph markup omitted ...]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.ttf
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.ttf
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.ttf
deleted file mode 100644
index a498ef4..000
Binary files 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.ttf
 and /dev/null differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.woff
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.woff
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.woff
deleted file mode 100644
index d83c539..000
Binary files 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.woff
 and /dev/null differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/clear.png
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/clear.png
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/clear.png
deleted file mode 100644
index 580b52a..000
Binary files 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/clear.png
 and /dev/null differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/loading.gif
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/loading.gif
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/loading.gif
deleted file mode 100644
index 5b33f7e..000
Binary files 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/loading.gif
 and /dev/null differ





[16/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread xkrogen
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-1.10.2.min.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-1.10.2.min.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-1.10.2.min.js
deleted file mode 100644
index da41706..000
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-1.10.2.min.js
+++ /dev/null
@@ -1,6 +0,0 @@
-/*! jQuery v1.10.2 | (c) 2005, 2013 jQuery Foundation, Inc. | 
jquery.org/license
-//@ sourceMappingURL=jquery-1.10.2.min.map
-*/
-[... minified jQuery 1.10.2 source omitted ...]

[41/50] [abbrv] hadoop git commit: YARN-8155. Improve ATSv2 client logging in RM and NM publisher. Contributed by Abhishek Modi.

2018-06-14 Thread xkrogen
YARN-8155. Improve ATSv2 client logging in RM and NM publisher. Contributed by 
Abhishek Modi.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9119b3cf
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9119b3cf
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9119b3cf

Branch: refs/heads/HDFS-12943
Commit: 9119b3cf8f883aa2d5df534afc0c50249fed03c6
Parents: ddd09d5
Author: Rohith Sharma K S 
Authored: Thu Jun 14 12:38:10 2018 +0530
Committer: Rohith Sharma K S 
Committed: Thu Jun 14 12:38:10 2018 +0530

--
 .../timelineservice/NMTimelinePublisher.java| 42 +---
 .../metrics/TimelineServiceV2Publisher.java |  8 +++-
 .../collector/TimelineCollectorWebService.java  | 19 ++---
 3 files changed, 56 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9119b3cf/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
index f451726..cbf3e5e 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
@@ -190,9 +190,20 @@ public class NMTimelinePublisher extends CompositeService {
   LOG.error("Seems like client has been removed before the container"
   + " metric could be published for " + 
container.getContainerId());
 }
-  } catch (IOException | YarnException e) {
+  } catch (IOException e) {
 LOG.error("Failed to publish Container metrics for container "
-+ container.getContainerId(), e);
++ container.getContainerId());
+if (LOG.isDebugEnabled()) {
+  LOG.debug("Failed to publish Container metrics for container "
+  + container.getContainerId(), e);
+}
+  } catch (YarnException e) {
+LOG.error("Failed to publish Container metrics for container "
++ container.getContainerId(), e.getMessage());
+if (LOG.isDebugEnabled()) {
+  LOG.debug("Failed to publish Container metrics for container "
+  + container.getContainerId(), e);
+}
   }
 }
   }
@@ -284,9 +295,20 @@ public class NMTimelinePublisher extends CompositeService {
 LOG.error("Seems like client has been removed before the event could 
be"
 + " published for " + container.getContainerId());
   }
-} catch (IOException | YarnException e) {
+} catch (IOException e) {
+  LOG.error("Failed to publish Container metrics for container "
+  + container.getContainerId());
+  if (LOG.isDebugEnabled()) {
+LOG.debug("Failed to publish Container metrics for container "
++ container.getContainerId(), e);
+  }
+} catch (YarnException e) {
   LOG.error("Failed to publish Container metrics for container "
-  + container.getContainerId(), e);
+  + container.getContainerId(), e.getMessage());
+  if (LOG.isDebugEnabled()) {
+LOG.debug("Failed to publish Container metrics for container "
++ container.getContainerId(), e);
+  }
 }
   }
 
@@ -315,8 +337,16 @@ public class NMTimelinePublisher extends CompositeService {
 LOG.error("Seems like client has been removed before the entity "
 + "could be published for " + entity);
   }
-} catch (Exception e) {
-  LOG.error("Error when publishing entity " + entity, e);
+} catch (IOException e) {
+  LOG.error("Error when publishing entity " + entity);
+  if (LOG.isDebugEnabled()) {
+LOG.debug("Error when publishing entity " + entity, e);
+  }
+} catch (YarnException e) {
+  LOG.error("Error when publishing entity " + entity, e.getMessage());
+  if (LOG.isDebugEnabled()) {
+LOG.debug("Error when publishing entity " + entity, e);
+  }
 }
   }
 

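The pattern applied throughout the patch keeps the steady-state log terse (message only at ERROR) and reserves the full stack trace for DEBUG, so an unreachable collector does not flood the NM/RM logs. A standalone SLF4J sketch of the same idea; publishEntity() is a hypothetical stand-in for the timeline client call:

import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class QuietPublisher {
  private static final Logger LOG =
      LoggerFactory.getLogger(QuietPublisher.class);

  // hypothetical publish call, used only to illustrate the logging pattern
  private void publishEntity(String entity) throws IOException {
    throw new IOException("collector unreachable");
  }

  public void publish(String entity) {
    try {
      publishEntity(entity);
    } catch (IOException e) {
      // terse one-liner at ERROR keeps the main log readable
      LOG.error("Error when publishing entity {}: {}", entity, e.getMessage());
      if (LOG.isDebugEnabled()) {
        // full stack trace only when debugging is enabled
        LOG.debug("Error when publishing entity " + entity, e);
      }
    }
  }
}
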

[04/50] [abbrv] hadoop git commit: HADOOP-15532. TestBasicDiskValidator fails with NoSuchFileException. Contributed by Giovanni Matteo Fumarola.

2018-06-14 Thread xkrogen
HADOOP-15532. TestBasicDiskValidator fails with NoSuchFileException. 
Contributed by Giovanni Matteo Fumarola.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/aeaf9fec
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/aeaf9fec
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/aeaf9fec

Branch: refs/heads/HDFS-12943
Commit: aeaf9fec62f10699d1c809d66444520fe4533c2c
Parents: 04b74ed
Author: Inigo Goiri 
Authored: Tue Jun 12 14:16:14 2018 -0700
Committer: Inigo Goiri 
Committed: Tue Jun 12 14:16:14 2018 -0700

--
 .../src/test/java/org/apache/hadoop/util/TestDiskChecker.java  | 6 --
 1 file changed, 4 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/aeaf9fec/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
index 6b6c6c8..e92c9ed 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
@@ -137,7 +137,8 @@ public class TestDiskChecker {
* @throws java.io.IOException if any
*/
   protected File createTempFile() throws java.io.IOException {
-File testDir = new File(System.getProperty("test.build.data"));
+File testDir =
+new File(System.getProperty("test.build.data", "target/test-dir"));
 return Files.createTempFile(testDir.toPath(), "test", "tmp").toFile();
   }
 
@@ -147,7 +148,8 @@ public class TestDiskChecker {
* @throws java.io.IOException if any
*/
   protected File createTempDir() throws java.io.IOException {
-File testDir = new File(System.getProperty("test.build.data"));
+File testDir =
+new File(System.getProperty("test.build.data", "target/test-dir"));
 return Files.createTempDirectory(testDir.toPath(), "test").toFile();
   }
 





[46/50] [abbrv] hadoop git commit: YARN-8426:Upgrade jquery-ui to 1.12.1 in YARN. Contributed by Sunil Govindan

2018-06-14 Thread xkrogen
YARN-8426:Upgrade jquery-ui to 1.12.1 in YARN. Contributed by Sunil Govindan


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/361ffb26
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/361ffb26
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/361ffb26

Branch: refs/heads/HDFS-12943
Commit: 361ffb26bebf2491bbe3219ef4a83eb753660018
Parents: 9591765
Author: Bharat Viswanadham 
Authored: Thu Jun 14 13:14:25 2018 -0700
Committer: Bharat Viswanadham 
Committed: Thu Jun 14 13:14:25 2018 -0700

--
 .../hadoop-yarn/hadoop-yarn-common/pom.xml |  2 +-
 .../org/apache/hadoop/yarn/webapp/view/JQueryUI.java   |  2 +-
 .../static/jquery/jquery-ui-1.12.1.custom.min.js   | 13 +
 .../static/jquery/jquery-ui-1.9.1.custom.min.js|  6 --
 4 files changed, 15 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/361ffb26/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
index af1440a..eddcbaa 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
@@ -242,7 +242,7 @@
 src/main/resources/webapps/static/dt-1.9.4/images/Sorting 
icons.psd
 
src/main/resources/webapps/static/dt-1.9.4/js/jquery.dataTables.min.js
 
src/main/resources/webapps/static/jt/jquery.jstree.js
-
src/main/resources/webapps/static/jquery/jquery-ui-1.9.1.custom.min.js
+
src/main/resources/webapps/static/jquery/jquery-ui-1.12.1.custom.min.js
 
src/main/resources/webapps/static/jquery/jquery-3.3.1.min.js
 
src/main/resources/webapps/static/jquery/themes-1.9.1/base/jquery-ui.css
 
src/test/resources/application_1440536969523_0001.har/_index

http://git-wip-us.apache.org/repos/asf/hadoop/blob/361ffb26/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
index d4fba1f..91e5f89 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
@@ -68,7 +68,7 @@ public class JQueryUI extends HtmlBlock {
 html.link(root_url("static/jquery/themes-1.9.1/base/jquery-ui.css"))
 .link(root_url("static/dt-1.9.4/css/jui-dt.css"))
 .script(root_url("static/jquery/jquery-3.3.1.min.js"))
-.script(root_url("static/jquery/jquery-ui-1.9.1.custom.min.js"))
+.script(root_url("static/jquery/jquery-ui-1.12.1.custom.min.js"))
 .script(root_url("static/dt-1.9.4/js/jquery.dataTables.min.js"))
 .script(root_url("static/yarn.dt.plugins.js"))
 .script(root_url("static/dt-sorting/natural.js"))





hadoop git commit: HDDS-146. Refactor the structure of the acceptance tests. Contributed by Elek, Marton.

2018-06-14 Thread aengineer
Repository: hadoop
Updated Branches:
  refs/heads/trunk 5d7449d2b -> 020dd6198


HDDS-146. Refactor the structure of the acceptance tests.
Contributed by Elek, Marton.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/020dd619
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/020dd619
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/020dd619

Branch: refs/heads/trunk
Commit: 020dd61988b1d47971e328174135d54baf5d41aa
Parents: 5d7449d
Author: Anu Engineer 
Authored: Thu Jun 14 14:14:24 2018 -0700
Committer: Anu Engineer 
Committed: Thu Jun 14 14:14:24 2018 -0700

--
 .../dev-support/bin/robot-all.sh|   2 +-
 .../dev-support/bin/robot-dnd-all.sh|  63 +
 .../acceptance-test/dev-support/bin/robot.sh|   9 +-
 .../dev-support/docker/Dockerfile   |  21 ++
 .../dev-support/docker/docker-compose.yaml  |  23 ++
 hadoop-ozone/acceptance-test/pom.xml|   1 +
 .../src/test/acceptance/basic/.env  |  17 ++
 .../src/test/acceptance/basic/basic.robot   |  50 
 .../test/acceptance/basic/docker-compose.yaml   |  62 +
 .../src/test/acceptance/basic/docker-config |  38 +++
 .../src/test/acceptance/basic/ozone-shell.robot |  85 ++
 .../src/test/acceptance/commonlib.robot |  79 ++
 .../src/test/acceptance/ozonefs/.env|  17 ++
 .../acceptance-test/src/test/compose/.env   |  17 --
 .../src/test/compose/docker-compose.yaml|  62 -
 .../src/test/compose/docker-config  |  36 ---
 .../robotframework/acceptance/ozone-shell.robot | 256 ---
 .../test/robotframework/acceptance/ozone.robot  | 104 
 start-build-env.sh  |   8 +-
 19 files changed, 467 insertions(+), 483 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/020dd619/hadoop-ozone/acceptance-test/dev-support/bin/robot-all.sh
--
diff --git a/hadoop-ozone/acceptance-test/dev-support/bin/robot-all.sh 
b/hadoop-ozone/acceptance-test/dev-support/bin/robot-all.sh
index 0e212a2..ee9c6b8 100755
--- a/hadoop-ozone/acceptance-test/dev-support/bin/robot-all.sh
+++ b/hadoop-ozone/acceptance-test/dev-support/bin/robot-all.sh
@@ -15,4 +15,4 @@
 # limitations under the License.
 
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-$DIR/robot.sh $DIR/../../src/test/robotframework/acceptance
\ No newline at end of file
+$DIR/robot.sh $DIR/../../src/test/acceptance

http://git-wip-us.apache.org/repos/asf/hadoop/blob/020dd619/hadoop-ozone/acceptance-test/dev-support/bin/robot-dnd-all.sh
--
diff --git a/hadoop-ozone/acceptance-test/dev-support/bin/robot-dnd-all.sh 
b/hadoop-ozone/acceptance-test/dev-support/bin/robot-dnd-all.sh
new file mode 100755
index 000..9f1d367
--- /dev/null
+++ b/hadoop-ozone/acceptance-test/dev-support/bin/robot-dnd-all.sh
@@ -0,0 +1,63 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -x
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+#Dir of the definition of the dind-based test execution container
+DOCKERDIR="$DIR/../docker"
+
+#Dir to save the results
+TARGETDIR="$DIR/../../target/dnd"
+
+#Dir to mount the distribution from
+OZONEDIST="$DIR/../../../../hadoop-dist/target/ozone"
+
+#Name and imagename of the temporary, dind based test containers
+DOCKER_IMAGE_NAME=ozoneacceptance
+DOCKER_INSTANCE_NAME="${DOCKER_INSTANCE_NAME:-ozoneacceptance}"
+
+teardown() {
+   docker stop "$DOCKER_INSTANCE_NAME"
+}
+
+trap teardown EXIT
+
+#Make sure it will work even if Ozone was built by another user. We
+# enable another user to run the distribution
+mkdir -p "$TARGETDIR"
+mkdir -p "$OZONEDIST/logs"
+chmod o+w "$OZONEDIST/logs" || true
+chmod -R o+w "$OZONEDIST/etc/hadoop" || true
+chmod o+w "$OZONEDIST" || true
+
+rm "$TARGETDIR/docker-compose.log"
+docker rm "$DOCKER_INSTANCE_NAME" || true
+docker build -t 

[2/2] hadoop git commit: HDFS-13675. Speed up TestDFSAdminWithHA. Contributed by Lukas Majercak.

2018-06-14 Thread inigoiri
HDFS-13675. Speed up TestDFSAdminWithHA. Contributed by Lukas Majercak.

(cherry picked from commit fcd7ae27452db0ca681cb05bafc2a32e2248d2c0)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e7503236
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e7503236
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e7503236

Branch: refs/heads/branch-2.9
Commit: e75032362a1c99618179c0cc612a385eaa080299
Parents: 502b263
Author: Inigo Goiri 
Authored: Thu Jun 14 14:12:54 2018 -0700
Committer: Inigo Goiri 
Committed: Thu Jun 14 14:13:24 2018 -0700

--
 .../java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java   | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e7503236/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
index 06722e0..0874f0e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HAUtil;
+import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.qjournal.MiniQJMHACluster;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
@@ -101,6 +102,10 @@ public class TestDFSAdminWithHA {
 conf.setInt(
 CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY,
 500);
+conf.setInt(HdfsClientConfigKeys.Failover.MAX_ATTEMPTS_KEY, 2);
+conf.setInt(HdfsClientConfigKeys.Retry.MAX_ATTEMPTS_KEY, 2);
+conf.setInt(HdfsClientConfigKeys.Failover.SLEEPTIME_BASE_KEY, 0);
+conf.setInt(HdfsClientConfigKeys.Failover.SLEEPTIME_MAX_KEY, 0);
   }
 
   @After


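The speed-up is purely configuration: capping failover and retry attempts and zeroing the failover sleep times makes tests that deliberately hit an unreachable NameNode fail immediately instead of backing off. A hedged helper built from the same keys; it assumes the HDFS client libraries are on the classpath and is not the committed test code:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;

public final class FastFailTestConf {

  private FastFailTestConf() { }

  /** Returns a Configuration tuned so HA client calls fail fast in tests. */
  public static Configuration create() {
    Configuration conf = new Configuration();
    // short IPC connect retry interval
    conf.setInt(
        CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY,
        500);
    // at most two failover/retry attempts, with no sleep between them
    conf.setInt(HdfsClientConfigKeys.Failover.MAX_ATTEMPTS_KEY, 2);
    conf.setInt(HdfsClientConfigKeys.Retry.MAX_ATTEMPTS_KEY, 2);
    conf.setInt(HdfsClientConfigKeys.Failover.SLEEPTIME_BASE_KEY, 0);
    conf.setInt(HdfsClientConfigKeys.Failover.SLEEPTIME_MAX_KEY, 0);
    return conf;
  }
}
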



[1/2] hadoop git commit: HDFS-13675. Speed up TestDFSAdminWithHA. Contributed by Lukas Majercak.

2018-06-14 Thread inigoiri
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 06c87edf5 -> fcd7ae274
  refs/heads/branch-2.9 502b26384 -> e75032362


HDFS-13675. Speed up TestDFSAdminWithHA. Contributed by Lukas Majercak.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fcd7ae27
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fcd7ae27
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fcd7ae27

Branch: refs/heads/branch-2
Commit: fcd7ae27452db0ca681cb05bafc2a32e2248d2c0
Parents: 06c87ed
Author: Inigo Goiri 
Authored: Thu Jun 14 14:12:54 2018 -0700
Committer: Inigo Goiri 
Committed: Thu Jun 14 14:12:54 2018 -0700

--
 .../java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java   | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fcd7ae27/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
index 06722e0..0874f0e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HAUtil;
+import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.qjournal.MiniQJMHACluster;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
@@ -101,6 +102,10 @@ public class TestDFSAdminWithHA {
 conf.setInt(
 CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY,
 500);
+conf.setInt(HdfsClientConfigKeys.Failover.MAX_ATTEMPTS_KEY, 2);
+conf.setInt(HdfsClientConfigKeys.Retry.MAX_ATTEMPTS_KEY, 2);
+conf.setInt(HdfsClientConfigKeys.Failover.SLEEPTIME_BASE_KEY, 0);
+conf.setInt(HdfsClientConfigKeys.Failover.SLEEPTIME_MAX_KEY, 0);
   }
 
   @After





[1/2] hadoop git commit: HDFS-13675. Speed up TestDFSAdminWithHA. Contributed by Lukas Majercak.

2018-06-14 Thread inigoiri
Repository: hadoop
Updated Branches:
  refs/heads/branch-3.1 d69c2e786 -> fd7cd596b
  refs/heads/trunk 361ffb26b -> 5d7449d2b


HDFS-13675. Speed up TestDFSAdminWithHA. Contributed by Lukas Majercak.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5d7449d2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5d7449d2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5d7449d2

Branch: refs/heads/trunk
Commit: 5d7449d2b8bcd0963d172fc30df784279671176f
Parents: 361ffb2
Author: Inigo Goiri 
Authored: Thu Jun 14 13:43:14 2018 -0700
Committer: Inigo Goiri 
Committed: Thu Jun 14 13:43:14 2018 -0700

--
 .../java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java   | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d7449d2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
index c6139c1..b85a8d8 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.qjournal.MiniQJMHACluster;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.hdfs.server.namenode.ha.BootstrapStandby;
@@ -104,6 +105,10 @@ public class TestDFSAdminWithHA {
 conf.setInt(
 CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY,
 500);
+conf.setInt(HdfsClientConfigKeys.Failover.MAX_ATTEMPTS_KEY, 2);
+conf.setInt(HdfsClientConfigKeys.Retry.MAX_ATTEMPTS_KEY, 2);
+conf.setInt(HdfsClientConfigKeys.Failover.SLEEPTIME_BASE_KEY, 0);
+conf.setInt(HdfsClientConfigKeys.Failover.SLEEPTIME_MAX_KEY, 0);
   }
 
   @After





[2/2] hadoop git commit: HDFS-13675. Speed up TestDFSAdminWithHA. Contributed by Lukas Majercak.

2018-06-14 Thread inigoiri
HDFS-13675. Speed up TestDFSAdminWithHA. Contributed by Lukas Majercak.

(cherry picked from commit 5d7449d2b8bcd0963d172fc30df784279671176f)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fd7cd596
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fd7cd596
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fd7cd596

Branch: refs/heads/branch-3.1
Commit: fd7cd596b768f1fe76d93f477d4d8a4736e91644
Parents: d69c2e7
Author: Inigo Goiri 
Authored: Thu Jun 14 13:43:14 2018 -0700
Committer: Inigo Goiri 
Committed: Thu Jun 14 13:43:51 2018 -0700

--
 .../java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java   | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fd7cd596/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
index c6139c1..b85a8d8 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.qjournal.MiniQJMHACluster;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.hdfs.server.namenode.ha.BootstrapStandby;
@@ -104,6 +105,10 @@ public class TestDFSAdminWithHA {
 conf.setInt(
 CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY,
 500);
+conf.setInt(HdfsClientConfigKeys.Failover.MAX_ATTEMPTS_KEY, 2);
+conf.setInt(HdfsClientConfigKeys.Retry.MAX_ATTEMPTS_KEY, 2);
+conf.setInt(HdfsClientConfigKeys.Failover.SLEEPTIME_BASE_KEY, 0);
+conf.setInt(HdfsClientConfigKeys.Failover.SLEEPTIME_MAX_KEY, 0);
   }
 
   @After





[2/2] hadoop git commit: HDDS-156. Implement HDDSVolume to manage volume state

2018-06-14 Thread hanishakoneru
HDDS-156. Implement HDDSVolume to manage volume state


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9a5552bf
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9a5552bf
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9a5552bf

Branch: refs/heads/HDDS-48
Commit: 9a5552bf762880c38a233597b7c6e9ea09441108
Parents: 418cff4
Author: Hanisha Koneru 
Authored: Thu Jun 14 13:28:41 2018 -0700
Committer: Hanisha Koneru 
Committed: Thu Jun 14 13:28:41 2018 -0700

--
 .../org/apache/hadoop/ozone/OzoneConsts.java|   5 +
 .../container/common/DataNodeLayoutVersion.java |  80 +
 .../common/helpers/DatanodeVersionFile.java |  95 ++
 .../impl/RoundRobinVolumeChoosingPolicy.java|  82 -
 .../ozone/container/common/impl/VolumeInfo.java | 162 -
 .../ozone/container/common/impl/VolumeSet.java  | 251 --
 .../container/common/impl/VolumeUsage.java  | 189 ---
 .../common/interfaces/VolumeChoosingPolicy.java |   4 +-
 .../container/common/utils/HddsVolumeUtil.java  | 163 +
 .../container/common/volume/HddsVolume.java | 330 +++
 .../volume/RoundRobinVolumeChoosingPolicy.java  |  83 +
 .../container/common/volume/VolumeInfo.java | 132 
 .../container/common/volume/VolumeSet.java  | 309 +
 .../container/common/volume/VolumeUsage.java| 198 +++
 .../container/common/volume/package-info.java   |  21 ++
 .../common/TestDatanodeLayOutVersion.java   |  38 +++
 .../common/helpers/TestDatanodeVersionFile.java | 134 
 .../TestRoundRobinVolumeChoosingPolicy.java | 100 --
 .../common/interfaces/TestVolumeSet.java| 149 -
 .../container/common/volume/TestHddsVolume.java | 145 
 .../TestRoundRobinVolumeChoosingPolicy.java | 131 
 .../container/common/volume/TestVolumeSet.java  | 157 +
 22 files changed, 2023 insertions(+), 935 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9a5552bf/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
--
diff --git 
a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
index c40dc8e..36f830b 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConsts.java
@@ -33,6 +33,11 @@ public final class OzoneConsts {
   public static final String OZONE_SIMPLE_ROOT_USER = "root";
   public static final String OZONE_SIMPLE_HDFS_USER = "hdfs";
 
+  public static final String STORAGE_ID = "storageID";
+  public static final String DATANODE_UUID = "datanodeUuid";
+  public static final String CLUSTER_ID = "clusterID";
+  public static final String LAYOUTVERSION = "layOutVersion";
+  public static final String CTIME = "ctime";
   /*
* BucketName length is used for both buckets and volume lengths
*/
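
The five constants added here are the property keys used by the new datanode VERSION file, which is written and read by the DatanodeVersionFile helper added later in this patch. As a rough illustration of the on-disk format, and assuming nothing beyond java.util.Properties plus the constants above, such a file can be round-tripped as follows; the class below is a sketch, not the committed helper:

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Properties;

import org.apache.hadoop.ozone.OzoneConsts;

/** Sketch only: persists the same five fields as a java.util.Properties file. */
public final class VersionFileSketch {
  private VersionFileSketch() { }

  public static void write(File file, String storageId, String clusterId,
      String datanodeUuid, int layoutVersion, long cTime) throws IOException {
    Properties props = new Properties();
    props.setProperty(OzoneConsts.STORAGE_ID, storageId);
    props.setProperty(OzoneConsts.CLUSTER_ID, clusterId);
    props.setProperty(OzoneConsts.DATANODE_UUID, datanodeUuid);
    props.setProperty(OzoneConsts.LAYOUTVERSION, String.valueOf(layoutVersion));
    props.setProperty(OzoneConsts.CTIME, String.valueOf(cTime));
    try (FileOutputStream out = new FileOutputStream(file)) {
      props.store(out, "HDDS datanode VERSION file (sketch)");
    }
  }

  public static Properties read(File file) throws IOException {
    Properties props = new Properties();
    try (FileInputStream in = new FileInputStream(file)) {
      props.load(in);
    }
    return props;
  }
}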

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9a5552bf/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/DataNodeLayoutVersion.java
--
diff --git 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/DataNodeLayoutVersion.java
 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/DataNodeLayoutVersion.java
new file mode 100644
index 000..2d58c39
--- /dev/null
+++ 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/DataNodeLayoutVersion.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.container.common;
+
+/**
+ * Datanode layout version which describes information about the layout version
+ * on the datanode.
+ */
+public final class DataNodeLayoutVersion {
+
+  // We will just be 
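
The DataNodeLayoutVersion source is cut off above. From the way the tests exercise it (DataNodeLayoutVersion.getLatestVersion().getVersion() in TestDatanodeVersionFile below), it is only clear that it exposes a numeric version with a notion of a latest entry; the sketch that follows is a hypothetical reconstruction of that shape, not the committed class:

/** Hypothetical shape of a layout-version holder; names and entries are assumptions. */
public final class LayoutVersionSketch {
  private static final LayoutVersionSketch[] VERSIONS = {
      new LayoutVersionSketch(1, "HDDS datanode initial layout")   // assumed first entry
  };

  private final int version;
  private final String description;

  private LayoutVersionSketch(int version, String description) {
    this.version = version;
    this.description = description;
  }

  public int getVersion() {
    return version;
  }

  public String getDescription() {
    return description;
  }

  /** The latest version is assumed to be the last registered entry. */
  public static LayoutVersionSketch getLatestVersion() {
    return VERSIONS[VERSIONS.length - 1];
  }
}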

[1/2] hadoop git commit: HDDS-156. Implement HDDSVolume to manage volume state

2018-06-14 Thread hanishakoneru
Repository: hadoop
Updated Branches:
  refs/heads/HDDS-48 418cff482 -> 9a5552bf7


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9a5552bf/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestDatanodeVersionFile.java
--
diff --git 
a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestDatanodeVersionFile.java
 
b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestDatanodeVersionFile.java
new file mode 100644
index 000..5889222
--- /dev/null
+++ 
b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/helpers/TestDatanodeVersionFile.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations 
under
+ * the License.
+ */
+
+package org.apache.hadoop.ozone.container.common.helpers;
+
+import org.apache.hadoop.ozone.common.InconsistentStorageStateException;
+import org.apache.hadoop.ozone.container.common.DataNodeLayoutVersion;
+import org.apache.hadoop.ozone.container.common.utils.HddsVolumeUtil;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.Time;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Properties;
+import java.util.UUID;
+
+import static org.junit.Assert.*;
+
+/**
+ * This class tests {@link DatanodeVersionFile}.
+ */
+public class TestDatanodeVersionFile {
+
+  private File versionFile;
+  private DatanodeVersionFile dnVersionFile;
+  private Properties properties;
+
+  private String storageID;
+  private String clusterID;
+  private String datanodeUUID;
+  private long cTime;
+  private int lv;
+
+  @Rule
+  public TemporaryFolder folder = new TemporaryFolder();
+
+  @Before
+  public void setup() throws IOException {
+versionFile = folder.newFile("Version");
+storageID = UUID.randomUUID().toString();
+clusterID = UUID.randomUUID().toString();
+datanodeUUID = UUID.randomUUID().toString();
+cTime = Time.now();
+lv = DataNodeLayoutVersion.getLatestVersion().getVersion();
+
+dnVersionFile = new DatanodeVersionFile(
+storageID, clusterID, datanodeUUID, cTime, lv);
+
+dnVersionFile.createVersionFile(versionFile);
+
+properties = dnVersionFile.readFrom(versionFile);
+  }
+
+  @Test
+  public void testCreateAndReadVersionFile() throws IOException {
+
+//Check VersionFile exists
+assertTrue(versionFile.exists());
+
+assertEquals(storageID, HddsVolumeUtil.getStorageID(
+properties, versionFile));
+assertEquals(clusterID, HddsVolumeUtil.getClusterID(
+properties, versionFile, clusterID));
+assertEquals(datanodeUUID, HddsVolumeUtil.getDatanodeUUID(
+properties, versionFile, datanodeUUID));
+assertEquals(cTime, HddsVolumeUtil.getCreationTime(
+properties, versionFile));
+assertEquals(lv, HddsVolumeUtil.getLayOutVersion(
+properties, versionFile));
+  }
+
+  @Test
+  public void testIncorrectClusterId() throws IOException {
+try {
+  String randomClusterID = UUID.randomUUID().toString();
+  HddsVolumeUtil.getClusterID(properties, versionFile,
+  randomClusterID);
+  fail("Test failure in testIncorrectClusterId");
+} catch (InconsistentStorageStateException ex) {
+  GenericTestUtils.assertExceptionContains("Mismatched ClusterIDs", ex);
+}
+  }
+
+  @Test
+  public void testVerifyCTime() throws IOException {
+long invalidCTime = -10;
+dnVersionFile = new DatanodeVersionFile(
+storageID, clusterID, datanodeUUID, invalidCTime, lv);
+dnVersionFile.createVersionFile(versionFile);
+properties = dnVersionFile.readFrom(versionFile);
+
+try {
+  HddsVolumeUtil.getCreationTime(properties, versionFile);
+  fail("Test failure in testVerifyCTime");
+} catch (InconsistentStorageStateException ex) {
+  GenericTestUtils.assertExceptionContains("Invalid Creation time in " +
+  "Version File : " + versionFile, ex);
+}
+  }
+
+  @Test
+  public void testVerifyLayOut() throws IOException {
+ 
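
The remainder of the test is not shown above, but the calls that are visible are enough for a condensed usage sketch of the new version-file helpers: write the file through DatanodeVersionFile, read it back as Properties, and let the HddsVolumeUtil getters validate each field (getClusterID and getDatanodeUUID compare against the expected values, while getCreationTime and getLayOutVersion check the stored value itself, throwing InconsistentStorageStateException on a mismatch, as the negative tests demonstrate). Paths and IDs below are placeholders; package names follow the file listing in this patch:

import java.io.File;
import java.util.Properties;
import java.util.UUID;

import org.apache.hadoop.ozone.container.common.DataNodeLayoutVersion;
import org.apache.hadoop.ozone.container.common.helpers.DatanodeVersionFile;
import org.apache.hadoop.ozone.container.common.utils.HddsVolumeUtil;
import org.apache.hadoop.util.Time;

public final class VersionFileRoundTrip {
  public static void main(String[] args) throws Exception {
    File versionFile = new File("/tmp/hdds-volume/VERSION");   // placeholder path
    String storageId = UUID.randomUUID().toString();
    String clusterId = UUID.randomUUID().toString();
    String datanodeUuid = UUID.randomUUID().toString();
    long cTime = Time.now();
    int layoutVersion = DataNodeLayoutVersion.getLatestVersion().getVersion();

    // Write the VERSION file and read it back as java.util.Properties.
    DatanodeVersionFile dnVersionFile = new DatanodeVersionFile(
        storageId, clusterId, datanodeUuid, cTime, layoutVersion);
    dnVersionFile.createVersionFile(versionFile);
    Properties props = dnVersionFile.readFrom(versionFile);

    // Each getter either returns the stored value or rejects an inconsistent file.
    System.out.println(HddsVolumeUtil.getStorageID(props, versionFile));
    System.out.println(HddsVolumeUtil.getClusterID(props, versionFile, clusterId));
    System.out.println(HddsVolumeUtil.getDatanodeUUID(props, versionFile, datanodeUuid));
    System.out.println(HddsVolumeUtil.getCreationTime(props, versionFile));
    System.out.println(HddsVolumeUtil.getLayOutVersion(props, versionFile));
  }
}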

[1/3] hadoop git commit: YARN-8426: Upgrade jquery-ui to 1.12.1 in YARN. Contributed by Sunil Govindan

2018-06-14 Thread bharat
Repository: hadoop
Updated Branches:
  refs/heads/trunk 959176504 -> 361ffb26b


http://git-wip-us.apache.org/repos/asf/hadoop/blob/361ffb26/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.9.1.custom.min.js
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.9.1.custom.min.js
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.9.1.custom.min.js
deleted file mode 100644
index aa7a923..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.9.1.custom.min.js
+++ /dev/null
@@ -1,6 +0,0 @@
-/*! jQuery UI - v1.9.1 - 2012-10-25
-* http://jqueryui.com
-* Includes: jquery.ui.core.js, jquery.ui.widget.js, jquery.ui.mouse.js, 
jquery.ui.position.js, jquery.ui.accordion.js, jquery.ui.autocomplete.js, 
jquery.ui.button.js, jquery.ui.datepicker.js, jquery.ui.dialog.js, 
jquery.ui.draggable.js, jquery.ui.droppable.js, jquery.ui.effect.js, 
jquery.ui.effect-blind.js, jquery.ui.effect-bounce.js, 
jquery.ui.effect-clip.js, jquery.ui.effect-drop.js, 
jquery.ui.effect-explode.js, jquery.ui.effect-fade.js, 
jquery.ui.effect-fold.js, jquery.ui.effect-highlight.js, 
jquery.ui.effect-pulsate.js, jquery.ui.effect-scale.js, 
jquery.ui.effect-shake.js, jquery.ui.effect-slide.js, 
jquery.ui.effect-transfer.js, jquery.ui.menu.js, jquery.ui.progressbar.js, 
jquery.ui.resizable.js, jquery.ui.selectable.js, jquery.ui.slider.js, 
jquery.ui.sortable.js, jquery.ui.spinner.js, jquery.ui.tabs.js, 
jquery.ui.tooltip.js
-* Copyright (c) 2012 jQuery Foundation and other contributors Licensed MIT */
-
-[minified jQuery UI v1.9.1 source body elided]

[3/3] hadoop git commit: YARN-8426: Upgrade jquery-ui to 1.12.1 in YARN. Contributed by Sunil Govindan

2018-06-14 Thread bharat
YARN-8426: Upgrade jquery-ui to 1.12.1 in YARN. Contributed by Sunil Govindan


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/361ffb26
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/361ffb26
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/361ffb26

Branch: refs/heads/trunk
Commit: 361ffb26bebf2491bbe3219ef4a83eb753660018
Parents: 9591765
Author: Bharat Viswanadham 
Authored: Thu Jun 14 13:14:25 2018 -0700
Committer: Bharat Viswanadham 
Committed: Thu Jun 14 13:14:25 2018 -0700

--
 .../hadoop-yarn/hadoop-yarn-common/pom.xml |  2 +-
 .../org/apache/hadoop/yarn/webapp/view/JQueryUI.java   |  2 +-
 .../static/jquery/jquery-ui-1.12.1.custom.min.js   | 13 +
 .../static/jquery/jquery-ui-1.9.1.custom.min.js|  6 --
 4 files changed, 15 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/361ffb26/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
index af1440a..eddcbaa 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
@@ -242,7 +242,7 @@
 src/main/resources/webapps/static/dt-1.9.4/images/Sorting 
icons.psd
 
src/main/resources/webapps/static/dt-1.9.4/js/jquery.dataTables.min.js
 
src/main/resources/webapps/static/jt/jquery.jstree.js
-
src/main/resources/webapps/static/jquery/jquery-ui-1.9.1.custom.min.js
+
src/main/resources/webapps/static/jquery/jquery-ui-1.12.1.custom.min.js
 
src/main/resources/webapps/static/jquery/jquery-3.3.1.min.js
 
src/main/resources/webapps/static/jquery/themes-1.9.1/base/jquery-ui.css
 
src/test/resources/application_1440536969523_0001.har/_index

http://git-wip-us.apache.org/repos/asf/hadoop/blob/361ffb26/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
index d4fba1f..91e5f89 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
@@ -68,7 +68,7 @@ public class JQueryUI extends HtmlBlock {
 html.link(root_url("static/jquery/themes-1.9.1/base/jquery-ui.css"))
 .link(root_url("static/dt-1.9.4/css/jui-dt.css"))
 .script(root_url("static/jquery/jquery-3.3.1.min.js"))
-.script(root_url("static/jquery/jquery-ui-1.9.1.custom.min.js"))
+.script(root_url("static/jquery/jquery-ui-1.12.1.custom.min.js"))
 .script(root_url("static/dt-1.9.4/js/jquery.dataTables.min.js"))
 .script(root_url("static/yarn.dt.plugins.js"))
 .script(root_url("static/dt-sorting/natural.js"))





[2/3] hadoop git commit: YARN-8426: Upgrade jquery-ui to 1.12.1 in YARN. Contributed by Sunil Govindan

2018-06-14 Thread bharat
http://git-wip-us.apache.org/repos/asf/hadoop/blob/361ffb26/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.12.1.custom.min.js
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.12.1.custom.min.js
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.12.1.custom.min.js
new file mode 100644
index 000..25398a1
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-ui-1.12.1.custom.min.js
@@ -0,0 +1,13 @@
+/*! jQuery UI - v1.12.1 - 2016-09-14
+* http://jqueryui.com
+* Includes: widget.js, position.js, data.js, disable-selection.js, effect.js, 
effects/effect-blind.js, effects/effect-bounce.js, effects/effect-clip.js, 
effects/effect-drop.js, effects/effect-explode.js, effects/effect-fade.js, 
effects/effect-fold.js, effects/effect-highlight.js, effects/effect-puff.js, 
effects/effect-pulsate.js, effects/effect-scale.js, effects/effect-shake.js, 
effects/effect-size.js, effects/effect-slide.js, effects/effect-transfer.js, 
focusable.js, form-reset-mixin.js, jquery-1-7.js, keycode.js, labels.js, 
scroll-parent.js, tabbable.js, unique-id.js, widgets/accordion.js, 
widgets/autocomplete.js, widgets/button.js, widgets/checkboxradio.js, 
widgets/controlgroup.js, widgets/datepicker.js, widgets/dialog.js, 
widgets/draggable.js, widgets/droppable.js, widgets/menu.js, widgets/mouse.js, 
widgets/progressbar.js, widgets/resizable.js, widgets/selectable.js, 
widgets/selectmenu.js, widgets/slider.js, widgets/sortable.js, 
widgets/spinner.js, widgets/tabs.js, widgets/toolt
 ip.js
+* Copyright jQuery Foundation and other contributors; Licensed MIT */
+
+[minified jQuery UI v1.12.1 source body elided]
 

hadoop git commit: YARN-8410. Fixed a bug in A record lookup by CNAME record. Contributed by Shane Kumpf

2018-06-14 Thread eyang
Repository: hadoop
Updated Branches:
  refs/heads/branch-3.1 e45541be5 -> d69c2e786


YARN-8410.  Fixed a bug in A record lookup by CNAME record.
Contributed by Shane Kumpf

(cherry picked from commit 9591765040b85927ac69179ab46383eef9560a28)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d69c2e78
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d69c2e78
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d69c2e78

Branch: refs/heads/branch-3.1
Commit: d69c2e7867ab87acfae69189efa9a1b19ee04670
Parents: e45541b
Author: Eric Yang 
Authored: Thu Jun 14 15:54:21 2018 -0400
Committer: Eric Yang 
Committed: Thu Jun 14 15:55:33 2018 -0400

--
 .../hadoop/registry/server/dns/RegistryDNS.java | 29 
 .../registry/server/dns/TestRegistryDNS.java| 23 ++--
 2 files changed, 44 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d69c2e78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
index 5e994fb..0022843 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
@@ -1126,19 +1126,38 @@ public class RegistryDNS extends AbstractService 
implements DNSOperations,
*/
   private byte remoteLookup(Message response, Name name, int type,
   int iterations) {
+// If retrieving the root zone, query for NS record type
+if (name.toString().equals(".")) {
+  type = Type.NS;
+}
+
+// Always add any CNAMEs to the response first
+if (type != Type.CNAME) {
+  Record[] cnameAnswers = getRecords(name, Type.CNAME);
+  if (cnameAnswers != null) {
+for (Record cnameR : cnameAnswers) {
+  if (!response.findRecord(cnameR)) {
+response.addRecord(cnameR, Section.ANSWER);
+  }
+}
+  }
+}
+
 // Forward lookup to primary DNS servers
 Record[] answers = getRecords(name, type);
 try {
   for (Record r : answers) {
-if (r.getType() == Type.SOA) {
-  response.addRecord(r, Section.AUTHORITY);
-} else {
-  response.addRecord(r, Section.ANSWER);
+if (!response.findRecord(r)) {
+  if (r.getType() == Type.SOA) {
+response.addRecord(r, Section.AUTHORITY);
+  } else {
+response.addRecord(r, Section.ANSWER);
+  }
 }
 if (r.getType() == Type.CNAME) {
   Name cname = ((CNAMERecord) r).getAlias();
   if (iterations < 6) {
-remoteLookup(response, cname, Type.CNAME, iterations + 1);
+remoteLookup(response, cname, type, iterations + 1);
   }
 }
   }
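
In plainer terms, the fix changes remoteLookup so that (1) CNAME records for the queried name go into the answer section up front, (2) duplicate records are skipped via response.findRecord, and (3) when a CNAME alias is chased, the recursion keeps the original query type instead of forcing Type.CNAME, so an A lookup through a CNAME ends at the terminal A record. The toy model below (plain Java, no dnsjava; all names illustrative) shows why keeping the original type matters:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class CnameChaseSketch {
  // name -> (record type -> value), e.g. "app.example." -> {"CNAME" -> "host1.example."}
  private final Map<String, Map<String, String>> zone = new HashMap<>();

  public void put(String name, String type, String value) {
    zone.computeIfAbsent(name, k -> new HashMap<>()).put(type, value);
  }

  /** Resolve name/type, following CNAME aliases with the ORIGINAL type, up to 6 hops. */
  public List<String> resolve(String name, String type) {
    List<String> answers = new ArrayList<>();
    String current = name;
    for (int hop = 0; hop < 6; hop++) {
      Map<String, String> records =
          zone.getOrDefault(current, Collections.<String, String>emptyMap());
      String alias = records.get("CNAME");
      if (alias != null && !"CNAME".equals(type)) {
        answers.add(current + " CNAME " + alias);   // keep the alias in the answer
        current = alias;                            // chase it with the original type
        continue;
      }
      String value = records.get(type);
      if (value != null) {
        answers.add(current + " " + type + " " + value);
      }
      break;
    }
    return answers;
  }

  public static void main(String[] args) {
    CnameChaseSketch dns = new CnameChaseSketch();
    dns.put("app.example.", "CNAME", "host1.example.");
    dns.put("host1.example.", "A", "10.0.0.7");
    // Prints the CNAME followed by the terminal A record.
    System.out.println(dns.resolve("app.example.", "A"));
  }
}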

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d69c2e78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
index 6ba58dd..969faf9 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
@@ -410,7 +410,7 @@ public class TestRegistryDNS extends Assert {
 return recs;
   }
 
-  Record[] assertDNSQueryNotNull(String lookup, int type)
+  Record[] assertDNSQueryNotNull(String lookup, int type, int answerCount)
   throws IOException {
 Name name = Name.fromString(lookup);
 Record question = Record.newRecord(name, type, DClass.IN);
@@ -424,7 +424,7 @@ public class TestRegistryDNS extends Assert {
 assertEquals("Questions do not match", query.getQuestion(),
 response.getQuestion());
 Record[] recs = response.getSectionArray(Section.ANSWER);
-assertEquals(1, recs.length);
+assertEquals(answerCount, recs.length);
 assertEquals(recs[0].getType(), type);
 return recs;
   }
@@ -656,7 

hadoop git commit: YARN-8410. Fixed a bug in A record lookup by CNAME record. Contributed by Shane Kumpf

2018-06-14 Thread eyang
Repository: hadoop
Updated Branches:
  refs/heads/trunk 8d4926f38 -> 959176504


YARN-8410.  Fixed a bug in A record lookup by CNAME record.
Contributed by Shane Kumpf


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/95917650
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/95917650
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/95917650

Branch: refs/heads/trunk
Commit: 9591765040b85927ac69179ab46383eef9560a28
Parents: 8d4926f
Author: Eric Yang 
Authored: Thu Jun 14 15:54:21 2018 -0400
Committer: Eric Yang 
Committed: Thu Jun 14 15:54:21 2018 -0400

--
 .../hadoop/registry/server/dns/RegistryDNS.java | 29 
 .../registry/server/dns/TestRegistryDNS.java| 23 ++--
 2 files changed, 44 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/95917650/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
index 5e994fb..0022843 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
@@ -1126,19 +1126,38 @@ public class RegistryDNS extends AbstractService 
implements DNSOperations,
*/
   private byte remoteLookup(Message response, Name name, int type,
   int iterations) {
+// If retrieving the root zone, query for NS record type
+if (name.toString().equals(".")) {
+  type = Type.NS;
+}
+
+// Always add any CNAMEs to the response first
+if (type != Type.CNAME) {
+  Record[] cnameAnswers = getRecords(name, Type.CNAME);
+  if (cnameAnswers != null) {
+for (Record cnameR : cnameAnswers) {
+  if (!response.findRecord(cnameR)) {
+response.addRecord(cnameR, Section.ANSWER);
+  }
+}
+  }
+}
+
 // Forward lookup to primary DNS servers
 Record[] answers = getRecords(name, type);
 try {
   for (Record r : answers) {
-if (r.getType() == Type.SOA) {
-  response.addRecord(r, Section.AUTHORITY);
-} else {
-  response.addRecord(r, Section.ANSWER);
+if (!response.findRecord(r)) {
+  if (r.getType() == Type.SOA) {
+response.addRecord(r, Section.AUTHORITY);
+  } else {
+response.addRecord(r, Section.ANSWER);
+  }
 }
 if (r.getType() == Type.CNAME) {
   Name cname = ((CNAMERecord) r).getAlias();
   if (iterations < 6) {
-remoteLookup(response, cname, Type.CNAME, iterations + 1);
+remoteLookup(response, cname, type, iterations + 1);
   }
 }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/95917650/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
index 6ba58dd..969faf9 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
@@ -410,7 +410,7 @@ public class TestRegistryDNS extends Assert {
 return recs;
   }
 
-  Record[] assertDNSQueryNotNull(String lookup, int type)
+  Record[] assertDNSQueryNotNull(String lookup, int type, int answerCount)
   throws IOException {
 Name name = Name.fromString(lookup);
 Record question = Record.newRecord(name, type, DClass.IN);
@@ -424,7 +424,7 @@ public class TestRegistryDNS extends Assert {
 assertEquals("Questions do not match", query.getQuestion(),
 response.getQuestion());
 Record[] recs = response.getSectionArray(Section.ANSWER);
-assertEquals(1, recs.length);
+assertEquals(answerCount, recs.length);
 assertEquals(recs[0].getType(), type);
 return recs;
   }
@@ -656,7 +656,24 @@ public class TestRegistryDNS extends Assert {
 
 // start assessing 

[1/5] hadoop git commit: HDFS-13563. TestDFSAdminWithHA times out on Windows. Contributed by Lukas Majercak.

2018-06-14 Thread inigoiri
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 96a6798c1 -> 06c87edf5
  refs/heads/branch-2.9 158c0e8f7 -> 502b26384
  refs/heads/branch-3.0 6542e31b7 -> a3c0c0389
  refs/heads/branch-3.1 1aa6c9407 -> e45541be5
  refs/heads/trunk 9119b3cf8 -> 8d4926f38


HDFS-13563. TestDFSAdminWithHA times out on Windows. Contributed by Lukas 
Majercak.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8d4926f3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8d4926f3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8d4926f3

Branch: refs/heads/trunk
Commit: 8d4926f38bf53b32453cd2bc7322c8818f752f85
Parents: 9119b3c
Author: Inigo Goiri 
Authored: Thu Jun 14 09:58:50 2018 -0700
Committer: Inigo Goiri 
Committed: Thu Jun 14 09:58:50 2018 -0700

--
 .../java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java | 7 +++
 1 file changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8d4926f3/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
index aa4d481..c6139c1 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
@@ -97,6 +97,13 @@ public class TestDFSAdminWithHA {
 
 System.setOut(new PrintStream(out));
 System.setErr(new PrintStream(err));
+
+// Reduce the number of retries to speed up the tests.
+conf.setInt(
+CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 3);
+conf.setInt(
+CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY,
+500);
   }
 
   @After





[3/5] hadoop git commit: HDFS-13563. TestDFSAdminWithHA times out on Windows. Contributed by Lukas Majercak.

2018-06-14 Thread inigoiri
HDFS-13563. TestDFSAdminWithHA times out on Windows. Contributed by Lukas 
Majercak.

(cherry picked from commit 8d4926f38bf53b32453cd2bc7322c8818f752f85)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a3c0c038
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a3c0c038
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a3c0c038

Branch: refs/heads/branch-3.0
Commit: a3c0c0389941262f520e59e5872968b62734885e
Parents: 6542e31
Author: Inigo Goiri 
Authored: Thu Jun 14 09:58:50 2018 -0700
Committer: Inigo Goiri 
Committed: Thu Jun 14 09:59:50 2018 -0700

--
 .../java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java | 7 +++
 1 file changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a3c0c038/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
index abd96d0..06722e0 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
@@ -94,6 +94,13 @@ public class TestDFSAdminWithHA {
 
 System.setOut(new PrintStream(out));
 System.setErr(new PrintStream(err));
+
+// Reduce the number of retries to speed up the tests.
+conf.setInt(
+CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 3);
+conf.setInt(
+CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY,
+500);
   }
 
   @After





[5/5] hadoop git commit: HDFS-13563. TestDFSAdminWithHA times out on Windows. Contributed by Lukas Majercak.

2018-06-14 Thread inigoiri
HDFS-13563. TestDFSAdminWithHA times out on Windows. Contributed by Lukas 
Majercak.

(cherry picked from commit 8d4926f38bf53b32453cd2bc7322c8818f752f85)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/502b2638
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/502b2638
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/502b2638

Branch: refs/heads/branch-2.9
Commit: 502b263849773a0eb8fc897e7c9b627f02924705
Parents: 158c0e8
Author: Inigo Goiri 
Authored: Thu Jun 14 09:58:50 2018 -0700
Committer: Inigo Goiri 
Committed: Thu Jun 14 10:00:40 2018 -0700

--
 .../java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java | 7 +++
 1 file changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/502b2638/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
index abd96d0..06722e0 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
@@ -94,6 +94,13 @@ public class TestDFSAdminWithHA {
 
 System.setOut(new PrintStream(out));
 System.setErr(new PrintStream(err));
+
+// Reduce the number of retries to speed up the tests.
+conf.setInt(
+CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 3);
+conf.setInt(
+CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY,
+500);
   }
 
   @After





[4/5] hadoop git commit: HDFS-13563. TestDFSAdminWithHA times out on Windows. Contributed by Lukas Majercak.

2018-06-14 Thread inigoiri
HDFS-13563. TestDFSAdminWithHA times out on Windows. Contributed by Lukas 
Majercak.

(cherry picked from commit 8d4926f38bf53b32453cd2bc7322c8818f752f85)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/06c87edf
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/06c87edf
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/06c87edf

Branch: refs/heads/branch-2
Commit: 06c87edf51145895695436512fda2f1df4d4b5bb
Parents: 96a6798
Author: Inigo Goiri 
Authored: Thu Jun 14 09:58:50 2018 -0700
Committer: Inigo Goiri 
Committed: Thu Jun 14 10:00:26 2018 -0700

--
 .../java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java | 7 +++
 1 file changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/06c87edf/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
index abd96d0..06722e0 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
@@ -94,6 +94,13 @@ public class TestDFSAdminWithHA {
 
 System.setOut(new PrintStream(out));
 System.setErr(new PrintStream(err));
+
+// Reduce the number of retries to speed up the tests.
+conf.setInt(
+CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 3);
+conf.setInt(
+CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY,
+500);
   }
 
   @After





[2/5] hadoop git commit: HDFS-13563. TestDFSAdminWithHA times out on Windows. Contributed by Lukas Majercak.

2018-06-14 Thread inigoiri
HDFS-13563. TestDFSAdminWithHA times out on Windows. Contributed by Lukas 
Majercak.

(cherry picked from commit 8d4926f38bf53b32453cd2bc7322c8818f752f85)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e45541be
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e45541be
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e45541be

Branch: refs/heads/branch-3.1
Commit: e45541be51b6ddba08ea4c2274509d17a1a87363
Parents: 1aa6c94
Author: Inigo Goiri 
Authored: Thu Jun 14 09:58:50 2018 -0700
Committer: Inigo Goiri 
Committed: Thu Jun 14 09:59:28 2018 -0700

--
 .../java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java | 7 +++
 1 file changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e45541be/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
index aa4d481..c6139c1 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
@@ -97,6 +97,13 @@ public class TestDFSAdminWithHA {
 
 System.setOut(new PrintStream(out));
 System.setErr(new PrintStream(err));
+
+// Reduce the number of retries to speed up the tests.
+conf.setInt(
+CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 3);
+conf.setInt(
+CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY,
+500);
   }
 
   @After





[48/50] [abbrv] hadoop git commit: HDDS-161. Add functionality to queue ContainerClose command from SCM Heartbeat Response to Ratis. Contributed by Shashikant Banerjee.

2018-06-14 Thread stevel
HDDS-161. Add functionality to queue ContainerClose command from SCM Heartbeat 
Response to Ratis.
Contributed by Shashikant Banerjee.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7547740e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7547740e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7547740e

Branch: refs/heads/HADOOP-15407
Commit: 7547740e5c65edaa6c6f8aa1c8debabbdfb0945e
Parents: 2299488
Author: Anu Engineer 
Authored: Wed Jun 13 17:50:42 2018 -0700
Committer: Anu Engineer 
Committed: Wed Jun 13 18:48:59 2018 -0700

--
 .../statemachine/DatanodeStateMachine.java  |   9 +
 .../CloseContainerCommandHandler.java   |  21 +-
 .../commandhandler/CommandDispatcher.java   |   4 +
 .../common/transport/server/XceiverServer.java  |   7 +
 .../transport/server/XceiverServerGrpc.java |   9 +
 .../transport/server/XceiverServerSpi.java  |   7 +
 .../server/ratis/XceiverServerRatis.java|  56 -
 .../container/ozoneimpl/OzoneContainer.java |  62 +-
 .../commands/CloseContainerCommand.java |  12 +-
 .../StorageContainerDatanodeProtocol.proto  |   1 +
 .../container/CloseContainerEventHandler.java   |   3 +-
 .../scm/container/closer/ContainerCloser.java   |   7 +-
 .../TestCloseContainerByPipeline.java   | 221 +++
 .../TestCloseContainerHandler.java  |   7 +-
 14 files changed, 412 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7547740e/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
--
diff --git 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
index cb4319d..dc4e673 100644
--- 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
+++ 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/DatanodeStateMachine.java
@@ -403,4 +403,13 @@ public class DatanodeStateMachine implements Closeable {
   public long getCommandHandled() {
 return commandsHandled;
   }
+
+  /**
+   * returns the Command Dispatcher.
+   * @return CommandDispatcher
+   */
+  @VisibleForTesting
+  public CommandDispatcher getCommandDispatcher() {
+return commandDispatcher;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7547740e/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerCommandHandler.java
--
diff --git 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerCommandHandler.java
 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerCommandHandler.java
index e8c602d..45f2bbd 100644
--- 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerCommandHandler.java
+++ 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/statemachine/commandhandler/CloseContainerCommandHandler.java
@@ -16,6 +16,8 @@
  */
 package org.apache.hadoop.ozone.container.common.statemachine.commandhandler;
 
+import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.protocol.proto
 .StorageContainerDatanodeProtocolProtos.SCMCommandProto;
 import org.apache.hadoop.hdds.protocol.proto
@@ -29,6 +31,8 @@ import org.apache.hadoop.util.Time;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.UUID;
+
 /**
  * Handler for close container command received from SCM.
  */
@@ -67,8 +71,23 @@ public class CloseContainerCommandHandler implements 
CommandHandler {
   CloseContainerCommandProto
   .parseFrom(command.getProtoBufMessage());
   containerID = closeContainerProto.getContainerID();
+  HddsProtos.ReplicationType replicationType =
+  closeContainerProto.getReplicationType();
+
+  ContainerProtos.CloseContainerRequestProto.Builder closeRequest =
+  ContainerProtos.CloseContainerRequestProto.newBuilder();
+  closeRequest.setContainerID(containerID);
 
-  container.getContainerManager().closeContainer(containerID);
+  
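
The handler body is cut off above, but what is visible already shows the shape of the change: the close command from SCM now carries a replication type, and a CloseContainerRequestProto is built so the close can be routed through the matching transport, per the commit title queued to Ratis when the container is Ratis-replicated. The toy model below illustrates only that routing decision; the class and method names are invented, not the committed code:

/** Toy model of routing a close-container command by replication type. */
public final class CloseDispatchSketch {
  enum ReplicationType { RATIS, STAND_ALONE }

  interface CloseSubmitter {
    void submitClose(long containerId);
  }

  static final class RatisQueueSubmitter implements CloseSubmitter {
    public void submitClose(long containerId) {
      System.out.println("queued close of container " + containerId + " via Ratis");
    }
  }

  static final class DirectSubmitter implements CloseSubmitter {
    public void submitClose(long containerId) {
      System.out.println("closed container " + containerId + " directly");
    }
  }

  static void handleCloseCommand(long containerId, ReplicationType type) {
    CloseSubmitter submitter = (type == ReplicationType.RATIS)
        ? new RatisQueueSubmitter() : new DirectSubmitter();
    submitter.submitClose(containerId);
  }

  public static void main(String[] args) {
    handleCloseCommand(42L, ReplicationType.RATIS);
    handleCloseCommand(43L, ReplicationType.STAND_ALONE);
  }
}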

[45/50] [abbrv] hadoop git commit: HDDS-159. RestClient: Implement list operations for volume, bucket and keys. Contributed by Lokesh Jain.

2018-06-14 Thread stevel
HDDS-159. RestClient: Implement list operations for volume, bucket and keys. 
Contributed by Lokesh Jain.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7566e0ec
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7566e0ec
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7566e0ec

Branch: refs/heads/HADOOP-15407
Commit: 7566e0ec5f1aff4cf3c53f4ccc5f3b57fff1e216
Parents: 43baa03
Author: Xiaoyu Yao 
Authored: Wed Jun 13 11:43:18 2018 -0700
Committer: Xiaoyu Yao 
Committed: Wed Jun 13 11:43:18 2018 -0700

--
 .../robotframework/acceptance/ozone-shell.robot |  18 +--
 .../hadoop/ozone/client/rest/RestClient.java| 113 ++-
 .../hadoop/ozone/ozShell/TestOzoneShell.java|  33 +++---
 .../hadoop/ozone/web/client/TestBuckets.java|   3 -
 .../ozone/web/client/TestBucketsRatis.java  |   3 -
 .../hadoop/ozone/web/client/TestVolume.java |  12 +-
 6 files changed, 136 insertions(+), 46 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7566e0ec/hadoop-ozone/acceptance-test/src/test/robotframework/acceptance/ozone-shell.robot
--
diff --git 
a/hadoop-ozone/acceptance-test/src/test/robotframework/acceptance/ozone-shell.robot
 
b/hadoop-ozone/acceptance-test/src/test/robotframework/acceptance/ozone-shell.robot
index 7ff4910..1a91a93 100644
--- 
a/hadoop-ozone/acceptance-test/src/test/robotframework/acceptance/ozone-shell.robot
+++ 
b/hadoop-ozone/acceptance-test/src/test/robotframework/acceptance/ozone-shell.robot
@@ -38,7 +38,7 @@ Scale it up to 5 datanodes
 
 Test ozone shell (RestClient without http port)
 Execute on  datanodeozone oz -createVolume 
http://ksm/hive -user bilbo -quota 100TB -root
-${result} = Execute on  datanodeozone oz -listVolume 
o3://ksm -user bilbo | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r 
'.[] | select(.volumeName=="hive")'
+${result} = Execute on  datanodeozone oz -listVolume 
http://ksm -user bilbo | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r 
'.[] | select(.volumeName=="hive")'
 Should contain  ${result}   createdOn
 Execute on  datanodeozone oz -updateVolume 
http://ksm/hive -user bill -quota 10TB
 ${result} = Execute on  datanodeozone oz -infoVolume 
http://ksm/hive | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r '. | 
select(.volumeName=="hive") | .owner | .name'
@@ -52,7 +52,7 @@ Test ozone shell (RestClient without http port)
 Should Be Equal ${result}   GROUP
 ${result} = Execute on  datanodeozone oz -updateBucket 
http://ksm/hive/bb1 -removeAcl group:samwise:r | grep -Ev 
'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r '. | select(.bucketName=="bb1") | 
.acls | .[] | select(.name=="frodo") | .type'
 Should Be Equal ${result}   USER
-${result} = Execute on  datanodeozone oz -listBucket 
o3://ksm/hive/ | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r '.[] | 
select(.bucketName=="bb1") | .volumeName'
+${result} = Execute on  datanodeozone oz -listBucket 
http://ksm/hive/ | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r '.[] 
| select(.bucketName=="bb1") | .volumeName'
 Should Be Equal ${result}   hive
 Execute on  datanodeozone oz -putKey 
http://ksm/hive/bb1/key1 -file NOTICE.txt
 Execute on  datanoderm -f NOTICE.txt.1
@@ -60,7 +60,7 @@ Test ozone shell (RestClient without http port)
 Execute on  datanodels -l NOTICE.txt.1
 ${result} = Execute on  datanodeozone oz -infoKey 
http://ksm/hive/bb1/key1 | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq 
-r '. | select(.keyName=="key1")'
 Should contain  ${result}   createdOn
-${result} = Execute on  datanodeozone oz -listKey 
o3://ksm/hive/bb1 | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r '.[] 
| select(.keyName=="key1") | .keyName'
+${result} = Execute on  datanodeozone oz -listKey 
http://ksm/hive/bb1 | grep -Ev 'Removed|WARN|DEBUG|ERROR|INFO|TRACE' | jq -r 
'.[] | select(.keyName=="key1") | .keyName'
 Should Be Equal ${result}   key1
 Execute on  datanodeozone oz -deleteKey 
http://ksm/hive/bb1/key1 -v
 Execute on  datanodeozone oz -deleteBucket 
http://ksm/hive/bb1
@@ -68,7 +68,7 @@ Test ozone shell (RestClient 

[37/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/js/bootstrap.min.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/js/bootstrap.min.js
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/js/bootstrap.min.js
deleted file mode 100644
index 0e668e8..000
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/js/bootstrap.min.js
+++ /dev/null
@@ -1,9 +0,0 @@
-/*!
- * Bootstrap v3.0.2 by @fat and @mdo
- * Copyright 2013 Twitter, Inc.
- * Licensed under http://www.apache.org/licenses/LICENSE-2.0
- *
- * Designed and built with all the love in the world by @mdo and @fat.
- */
-
-if("undefined"==typeof jQuery)throw new Error("Bootstrap requires 
jQuery");+function(a){"use strict";function b(){var 
a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd
 otransitionend",transition:"transitionend"};for(var c in b)if(void 
0!==a.style[c])return{end:b[c]}}a.fn.emulateTransitionEnd=function(b){var 
c=!1,d=this;a(this).one(a.support.transition.end,function(){c=!0});var 
e=function(){c||a(d).trigger(a.support.transition.end)};return 
setTimeout(e,b),this},a(function(){a.support.transition=b()})}(jQuery),+function(a){"use
 strict";var 
b='[data-dismiss="alert"]',c=function(c){a(c).on("click",b,this.close)};c.prototype.close=function(b){function
 c(){f.trigger("closed.bs.alert").remove()}var 
d=a(this),e=d.attr("data-target");e||(e=d.attr("href"),e=e&(/.*(?=#[^\s]*$)/,""));var
 
f=a(e);b&(),f.length||(f=d.hasClass("alert")?d:d.parent()),f.trigger(b=a.Event("close.bs.alert"
 
)),b.isDefaultPrevented()||(f.removeClass("in"),a.support.transition&("fade")?f.one(a.support.transition.end,c).emulateTransitionEnd(150):c())};var
 d=a.fn.alert;a.fn.alert=function(b){return this.each(function(){var 
d=a(this),e=d.data("bs.alert");e||d.data("bs.alert",e=new 
c(this)),"string"==typeof 
b&[b].call(d)})},a.fn.alert.Constructor=c,a.fn.alert.noConflict=function(){return
 
a.fn.alert=d,this},a(document).on("click.bs.alert.data-api",b,c.prototype.close)}(jQuery),+function(a){"use
 strict";var 
b=function(c,d){this.$element=a(c),this.options=a.extend({},b.DEFAULTS,d)};b.DEFAULTS={loadingText:"loading..."},b.prototype.setState=function(a){var
 
b="disabled",c=this.$element,d=c.is("input")?"val":"html",e=c.data();a+="Text",e.resetText||c.data("resetText",c[d]()),c[d](e[a]||this.options[a]),setTimeout(function(){"loadingText"==a?c.addClass(b).attr(b,b):c.removeClass(b).removeAttr(b)},0)},b.prototype.toggle=function(){var
 a=this.$element.closest('[data-toggle="buttons"]');i
 f(a.length){var 
b=this.$element.find("input").prop("checked",!this.$element.hasClass("active")).trigger("change");"radio"===b.prop("type")&(".active").removeClass("active")}this.$element.toggleClass("active")};var
 c=a.fn.button;a.fn.button=function(c){return this.each(function(){var 
d=a(this),e=d.data("bs.button"),f="object"==typeof 
c&e||d.data("bs.button",e=new 
b(this,f)),"toggle"==c?e.toggle():c&(c)})},a.fn.button.Constructor=b,a.fn.button.noConflict=function(){return
 
a.fn.button=c,this},a(document).on("click.bs.button.data-api","[data-toggle^=button]",function(b){var
 
c=a(b.target);c.hasClass("btn")||(c=c.closest(".btn")),c.button("toggle"),b.preventDefault()})}(jQuery),+function(a){"use
 strict";var 
b=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=this.sliding=this.interval=this.$active=this.$items=null,"hover"==this.options.pause&$element.on("mouseenter",a.proxy(this.pause,this)).o
 
n("mouseleave",a.proxy(this.cycle,this))};b.DEFAULTS={interval:5e3,pause:"hover",wrap:!0},b.prototype.cycle=function(b){return
 
b||(this.paused=!1),this.interval&(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},b.prototype.getActiveIndex=function(){return
 
this.$active=this.$element.find(".item.active"),this.$items=this.$active.parent().children(),this.$items.index(this.$active)},b.prototype.to=function(b){var
 c=this,d=this.getActiveIndex();return b>this.$items.length-1||0>b?void 
0:this.sliding?this.$element.one("slid",function(){c.to(b)}):d==b?this.pause().cycle():this.slide(b>d?"next":"prev",a(this.$items[b]))},b.prototype.pause=function(b){return
 b||(this.paused=!0),this.$element.find(".next, 
.prev").length&&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},b.prototype.next=function(){return
 this
 .sliding?void 0:this.slide("next")},b.prototype.prev=function(){return 
this.sliding?void 0:this.slide("prev")},b.prototype.slide=function(b,c){var 

[40/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/css/bootstrap.min.css
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/css/bootstrap.min.css
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/css/bootstrap.min.css
deleted file mode 100644
index 3deec34..000
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/css/bootstrap.min.css
+++ /dev/null
@@ -1,9 +0,0 @@
-/*!
- * Bootstrap v3.0.2 by @fat and @mdo
- * Copyright 2013 Twitter, Inc.
- * Licensed under http://www.apache.org/licenses/LICENSE-2.0
- *
- * Designed and built with all the love in the world by @mdo and @fat.
- */
-
-[minified Bootstrap v3.0.2 CSS body (including bundled normalize.css v2.1.3) elided]
 

[49/50] [abbrv] hadoop git commit: HDDS-163. Add Datanode heartbeat dispatcher in SCM. Contributed by Nandakumar.

2018-06-14 Thread stevel
HDDS-163. Add Datanode heartbeat dispatcher in SCM.
Contributed by Nandakumar.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ddd09d59
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ddd09d59
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ddd09d59

Branch: refs/heads/HADOOP-15407
Commit: ddd09d59f3d9825f068026622720914e04c2e1d6
Parents: 7547740
Author: Anu Engineer 
Authored: Wed Jun 13 20:18:22 2018 -0700
Committer: Anu Engineer 
Committed: Wed Jun 13 20:18:22 2018 -0700

--
 .../container/common/report/ReportManager.java  |   3 +-
 .../scm/server/SCMDatanodeProtocolServer.java   | 119 +---
 .../SCMDatanodeContainerReportHandler.java  |  76 
 .../report/SCMDatanodeHeartbeatDispatcher.java  | 189 +++
 .../report/SCMDatanodeNodeReportHandler.java|  43 +
 .../server/report/SCMDatanodeReportHandler.java |  83 
 .../report/SCMDatanodeReportHandlerFactory.java |  82 
 .../hdds/scm/server/report/package-info.java|  57 ++
 .../TestSCMDatanodeContainerReportHandler.java  |  34 
 .../TestSCMDatanodeHeartbeatDispatcher.java | 138 ++
 .../TestSCMDatanodeNodeReportHandler.java   |  36 
 .../TestSCMDatanodeReportHandlerFactory.java|  51 +
 .../hdds/scm/server/report/package-info.java|  21 +++
 .../apache/hadoop/ozone/scm/TestSCMMetrics.java |  20 +-
 14 files changed, 875 insertions(+), 77 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ddd09d59/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/report/ReportManager.java
--
diff --git 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/report/ReportManager.java
 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/report/ReportManager.java
index c09282e..8097cd6 100644
--- 
a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/report/ReportManager.java
+++ 
b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/report/ReportManager.java
@@ -50,7 +50,8 @@ public final class ReportManager {
 List publishers) {
 this.context = context;
 this.publishers = publishers;
-this.executorService = HadoopExecutors.newScheduledThreadPool(1,
+this.executorService = HadoopExecutors.newScheduledThreadPool(
+publishers.size(),
 new ThreadFactoryBuilder().setDaemon(true)
 .setNameFormat("Datanode ReportManager Thread - %d").build());
   }
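
A note on the hunk above: it widens the report manager's scheduled pool from a single thread to one thread per publisher, so one slow publisher cannot delay the others. Below is a minimal, self-contained sketch of that scheduling idea using only the JDK executor API; the class name, task bodies, and interval are illustrative stand-ins, not code from the patch.

import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/** Minimal sketch: one scheduler thread per periodic publisher (hypothetical names). */
public class ReportSchedulingSketch {
  private final List<Runnable> publishers;
  private final ScheduledExecutorService pool;

  ReportSchedulingSketch(List<Runnable> publishers) {
    this.publishers = publishers;
    // Sized like the patch: publishers.size() threads instead of 1.
    this.pool = Executors.newScheduledThreadPool(publishers.size());
  }

  void start(long intervalSeconds) {
    for (Runnable publisher : publishers) {
      pool.scheduleAtFixedRate(publisher, 0, intervalSeconds, TimeUnit.SECONDS);
    }
  }

  void stop() {
    pool.shutdownNow();
  }

  public static void main(String[] args) throws InterruptedException {
    ReportSchedulingSketch sketch = new ReportSchedulingSketch(List.of(
        () -> System.out.println("node report"),
        () -> System.out.println("container report")));
    sketch.start(1);
    Thread.sleep(1500);  // let each publisher fire at least once
    sketch.stop();
  }
}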

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ddd09d59/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
--
diff --git 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
 
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
index 1b1645d..7d16161 100644
--- 
a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
+++ 
b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
@@ -69,7 +69,7 @@ import static org.apache.hadoop.hdds.protocol.proto
 
 
 import org.apache.hadoop.hdds.scm.HddsServerUtil;
-import org.apache.hadoop.hdds.scm.container.placement.metrics.ContainerStat;
+import org.apache.hadoop.hdds.scm.server.report.SCMDatanodeHeartbeatDispatcher;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
@@ -114,6 +114,7 @@ public class SCMDatanodeProtocolServer implements
 
   private final StorageContainerManager scm;
   private final InetSocketAddress datanodeRpcAddress;
+  private final SCMDatanodeHeartbeatDispatcher heartbeatDispatcher;
 
   public SCMDatanodeProtocolServer(final OzoneConfiguration conf,
   StorageContainerManager scm)  throws IOException {
@@ -148,14 +149,22 @@ public class SCMDatanodeProtocolServer implements
 updateRPCListenAddress(
 conf, OZONE_SCM_DATANODE_ADDRESS_KEY, datanodeRpcAddr,
 datanodeRpcServer);
+
+heartbeatDispatcher = SCMDatanodeHeartbeatDispatcher.newBuilder(conf, scm)
+.addHandlerFor(NodeReportProto.class)
+.addHandlerFor(ContainerReportsProto.class)
+.build();
   }
 
-  public InetSocketAddress getDatanodeRpcAddress() {
-return datanodeRpcAddress;
+  public void start() {
+LOG.info(
+StorageContainerManager.buildRpcServerStartMessage(
+"RPC server for DataNodes", 

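A note on the dispatcher wiring above: each heartbeat report type gets its own registered handler, and the dispatcher routes incoming payloads by type. The sketch below shows that dispatch-by-payload-type pattern in isolation; every class and method name here is a hypothetical stand-in, not the actual SCM API.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;

/** Illustrative only: route report payloads to per-type handlers. */
public class HeartbeatDispatchSketch {

  // Stand-ins for protobuf report payloads such as node and container reports.
  static final class NodeReport {
    final String host;
    NodeReport(String host) { this.host = host; }
  }

  static final class ContainerReport {
    final int containerCount;
    ContainerReport(int containerCount) { this.containerCount = containerCount; }
  }

  private final Map<Class<?>, Consumer<Object>> handlers = new HashMap<>();

  <T> HeartbeatDispatchSketch register(Class<T> type, Consumer<T> handler) {
    // Wrap the typed handler so it can live in one map and be cast safely on dispatch.
    handlers.put(type, payload -> handler.accept(type.cast(payload)));
    return this;
  }

  void dispatch(Object report) {
    Consumer<Object> handler = handlers.get(report.getClass());
    if (handler == null) {
      System.out.println("no handler registered for " + report.getClass().getSimpleName());
      return;
    }
    handler.accept(report);
  }

  public static void main(String[] args) {
    HeartbeatDispatchSketch dispatcher = new HeartbeatDispatchSketch()
        .register(NodeReport.class, r -> System.out.println("node report from " + r.host))
        .register(ContainerReport.class, r -> System.out.println(r.containerCount + " containers"));

    dispatcher.dispatch(new NodeReport("dn-1"));
    dispatcher.dispatch(new ContainerReport(42));
  }
}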
[34/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css
new file mode 100644
index 000..6167622
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css
@@ -0,0 +1,6757 @@
+/*!
+ * Bootstrap v3.3.7 (http://getbootstrap.com)
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ */
+/*! normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */
+html {
+  font-family: sans-serif;
+  -webkit-text-size-adjust: 100%;
+  -ms-text-size-adjust: 100%;
+}
+body {
+  margin: 0;
+}
+article,
+aside,
+details,
+figcaption,
+figure,
+footer,
+header,
+hgroup,
+main,
+menu,
+nav,
+section,
+summary {
+  display: block;
+}
+audio,
+canvas,
+progress,
+video {
+  display: inline-block;
+  vertical-align: baseline;
+}
+audio:not([controls]) {
+  display: none;
+  height: 0;
+}
+[hidden],
+template {
+  display: none;
+}
+a {
+  background-color: transparent;
+}
+a:active,
+a:hover {
+  outline: 0;
+}
+abbr[title] {
+  border-bottom: 1px dotted;
+}
+b,
+strong {
+  font-weight: bold;
+}
+dfn {
+  font-style: italic;
+}
+h1 {
+  margin: .67em 0;
+  font-size: 2em;
+}
+mark {
+  color: #000;
+  background: #ff0;
+}
+small {
+  font-size: 80%;
+}
+sub,
+sup {
+  position: relative;
+  font-size: 75%;
+  line-height: 0;
+  vertical-align: baseline;
+}
+sup {
+  top: -.5em;
+}
+sub {
+  bottom: -.25em;
+}
+img {
+  border: 0;
+}
+svg:not(:root) {
+  overflow: hidden;
+}
+figure {
+  margin: 1em 40px;
+}
+hr {
+  height: 0;
+  -webkit-box-sizing: content-box;
+ -moz-box-sizing: content-box;
+  box-sizing: content-box;
+}
+pre {
+  overflow: auto;
+}
+code,
+kbd,
+pre,
+samp {
+  font-family: monospace, monospace;
+  font-size: 1em;
+}
+button,
+input,
+optgroup,
+select,
+textarea {
+  margin: 0;
+  font: inherit;
+  color: inherit;
+}
+button {
+  overflow: visible;
+}
+button,
+select {
+  text-transform: none;
+}
+button,
+html input[type="button"],
+input[type="reset"],
+input[type="submit"] {
+  -webkit-appearance: button;
+  cursor: pointer;
+}
+button[disabled],
+html input[disabled] {
+  cursor: default;
+}
+button::-moz-focus-inner,
+input::-moz-focus-inner {
+  padding: 0;
+  border: 0;
+}
+input {
+  line-height: normal;
+}
+input[type="checkbox"],
+input[type="radio"] {
+  -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+  box-sizing: border-box;
+  padding: 0;
+}
+input[type="number"]::-webkit-inner-spin-button,
+input[type="number"]::-webkit-outer-spin-button {
+  height: auto;
+}
+input[type="search"] {
+  -webkit-box-sizing: content-box;
+ -moz-box-sizing: content-box;
+  box-sizing: content-box;
+  -webkit-appearance: textfield;
+}
+input[type="search"]::-webkit-search-cancel-button,
+input[type="search"]::-webkit-search-decoration {
+  -webkit-appearance: none;
+}
+fieldset {
+  padding: .35em .625em .75em;
+  margin: 0 2px;
+  border: 1px solid #c0c0c0;
+}
+legend {
+  padding: 0;
+  border: 0;
+}
+textarea {
+  overflow: auto;
+}
+optgroup {
+  font-weight: bold;
+}
+table {
+  border-spacing: 0;
+  border-collapse: collapse;
+}
+td,
+th {
+  padding: 0;
+}
+/*! Source: 
https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */
+@media print {
+  *,
+  *:before,
+  *:after {
+color: #000 !important;
+text-shadow: none !important;
+background: transparent !important;
+-webkit-box-shadow: none !important;
+box-shadow: none !important;
+  }
+  a,
+  a:visited {
+text-decoration: underline;
+  }
+  a[href]:after {
+content: " (" attr(href) ")";
+  }
+  abbr[title]:after {
+content: " (" attr(title) ")";
+  }
+  a[href^="#"]:after,
+  a[href^="javascript:"]:after {
+content: "";
+  }
+  pre,
+  blockquote {
+border: 1px solid #999;
+
+page-break-inside: avoid;
+  }
+  thead {
+display: table-header-group;
+  }
+  tr,
+  img {
+page-break-inside: avoid;
+  }
+  img {
+max-width: 100% !important;
+  }
+  p,
+  h2,
+  h3 {
+orphans: 3;
+widows: 3;
+  }
+  h2,
+  h3 {
+page-break-after: avoid;
+  }
+  .navbar {
+display: none;
+  }
+  .btn > .caret,
+  .dropup > .btn > .caret {
+border-top-color: #000 !important;
+  }
+  .label {
+border: 1px solid #000;
+  }
+  .table {
+border-collapse: collapse !important;
+  }
+  .table td,
+  .table th {
+background-color: #fff !important;
+  }
+  .table-bordered th,
+  .table-bordered td {
+border: 1px solid #ddd !important;
+  }
+}
+@font-face {
+  font-family: 'Glyphicons Halflings';
+
+  src: 

[24/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.3.1.min.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.3.1.min.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.3.1.min.js
new file mode 100644
index 000..4d9b3a2
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-3.3.1.min.js
@@ -0,0 +1,2 @@
+/*! jQuery v3.3.1 | (c) JS Foundation and other contributors | jquery.org/license */
+[minified jQuery 3.3.1 source added by this commit; the content is mangled and truncated in the archive and is elided here]

[20/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
index 3338052..af1440a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
@@ -243,7 +243,7 @@
 
src/main/resources/webapps/static/dt-1.9.4/js/jquery.dataTables.min.js
 
src/main/resources/webapps/static/jt/jquery.jstree.js
 
src/main/resources/webapps/static/jquery/jquery-ui-1.9.1.custom.min.js
-
src/main/resources/webapps/static/jquery/jquery-1.8.2.min.js
+
src/main/resources/webapps/static/jquery/jquery-3.3.1.min.js
 
src/main/resources/webapps/static/jquery/themes-1.9.1/base/jquery-ui.css
 
src/test/resources/application_1440536969523_0001.har/_index
 
src/test/resources/application_1440536969523_0001.har/part-0

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
index dba19c9..d4fba1f 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java
@@ -67,7 +67,7 @@ public class JQueryUI extends HtmlBlock {
   protected void render(Block html) {
 html.link(root_url("static/jquery/themes-1.9.1/base/jquery-ui.css"))
 .link(root_url("static/dt-1.9.4/css/jui-dt.css"))
-.script(root_url("static/jquery/jquery-1.8.2.min.js"))
+.script(root_url("static/jquery/jquery-3.3.1.min.js"))
 .script(root_url("static/jquery/jquery-ui-1.9.1.custom.min.js"))
 .script(root_url("static/dt-1.9.4/js/jquery.dataTables.min.js"))
 .script(root_url("static/yarn.dt.plugins.js"))





[30/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.eot
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.eot
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.eot
new file mode 100644
index 000..b93a495
Binary files /dev/null and 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.eot
 differ





[03/50] [abbrv] hadoop git commit: YARN-8363. Upgrade commons-lang version to 3.7 in hadoop-yarn-project. Contributed by Takanobu Asanuma.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/652bcbb3/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerAutoCreatedQueueBase.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerAutoCreatedQueueBase.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerAutoCreatedQueueBase.java
index addec66..f313d70 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerAutoCreatedQueueBase.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerAutoCreatedQueueBase.java
@@ -19,7 +19,7 @@ package 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -550,7 +550,7 @@ public class TestCapacitySchedulerAutoCreatedQueueBase {
   protected RMApp submitApp(String user, String queue, String nodeLabel)
   throws Exception {
 RMApp app = mockRM.submitApp(GB,
-"test-auto-queue-creation" + RandomUtils.nextInt(100), user, null,
+"test-auto-queue-creation" + RandomUtils.nextInt(0, 100), user, null,
 queue, nodeLabel);
 Assert.assertEquals(app.getAmNodeLabelExpression(), nodeLabel);
 // check preconditions

http://git-wip-us.apache.org/repos/asf/hadoop/blob/652bcbb3/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/clientrm/FederationClientInterceptor.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/clientrm/FederationClientInterceptor.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/clientrm/FederationClientInterceptor.java
index 07eaf97..4c4e371 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/clientrm/FederationClientInterceptor.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/clientrm/FederationClientInterceptor.java
@@ -25,7 +25,7 @@ import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
 import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
@@ -564,172 +564,172 @@ public class FederationClientInterceptor
   @Override
   public GetApplicationsResponse getApplications(GetApplicationsRequest 
request)
   throws YarnException, IOException {
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
   }
 
   @Override
   public GetClusterMetricsResponse getClusterMetrics(
   GetClusterMetricsRequest request) throws YarnException, IOException {
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
   }
 
   @Override
   public GetClusterNodesResponse getClusterNodes(GetClusterNodesRequest 
request)
   throws YarnException, IOException {
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
   }
 
   @Override
   public GetQueueInfoResponse getQueueInfo(GetQueueInfoRequest request)
   throws YarnException, IOException {
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
   }
 
   @Override
   public GetQueueUserAclsInfoResponse getQueueUserAcls(
   GetQueueUserAclsInfoRequest request) throws YarnException, IOException {
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
   }
 
   

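Two behavioural details of the commons-lang 3 migration in this hunk are easy to miss: lang3's RandomUtils.nextInt takes an explicit (startInclusive, endExclusive) range rather than a single bound, and lang3's NotImplementedException no longer has a no-argument constructor, so a message (or cause) must be supplied. A small stand-alone illustration, assuming only commons-lang3 on the classpath:

import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.RandomUtils;

public class Lang3MigrationSketch {

  static void unsupportedCall() {
    // lang3 requires a message (or cause); the old no-arg constructor is gone.
    throw new NotImplementedException("Code is not implemented");
  }

  public static void main(String[] args) {
    // Explicit [0, 100) range, matching the old single-argument nextInt(100).
    int suffix = RandomUtils.nextInt(0, 100);
    System.out.println("test-auto-queue-creation" + suffix);

    try {
      unsupportedCall();
    } catch (NotImplementedException e) {
      System.out.println("caught: " + e.getMessage());
    }
  }
}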
[43/50] [abbrv] hadoop git commit: HDFS-13641. Add metrics for edit log tailing. Contributed by Chao Sun.

2018-06-14 Thread stevel
HDFS-13641. Add metrics for edit log tailing. Contributed by Chao Sun.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8e7548d3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8e7548d3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8e7548d3

Branch: refs/heads/HADOOP-15407
Commit: 8e7548d33be9c4874daab18b2e774bdc2ed216d3
Parents: 6307962
Author: Yiqun Lin 
Authored: Wed Jun 13 20:05:55 2018 +0800
Committer: Yiqun Lin 
Committed: Wed Jun 13 20:05:55 2018 +0800

--
 .../hadoop-common/src/site/markdown/Metrics.md  | 12 
 .../org/apache/hadoop/test/MetricsAsserts.java  | 22 ++--
 .../hdfs/server/namenode/ha/EditLogTailer.java  | 11 
 .../namenode/metrics/NameNodeMetrics.java   | 59 +++-
 .../namenode/metrics/TestNameNodeMetrics.java   | 41 ++
 5 files changed, 140 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8e7548d3/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
--
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md 
b/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
index 2538491..676ab0b 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/Metrics.md
@@ -181,6 +181,18 @@ Each metrics record contains tags such as ProcessName, SessionId, and Hostname a
 | `WarmUpEDEKTimeAvgTime` | Average time of warming up EDEK in milliseconds |
 | `ResourceCheckTime`*num*`s(50/75/90/95/99)thPercentileLatency` | The 50/75/90/95/99th percentile of NameNode resource check latency in milliseconds. Percentile measurement is off by default, by watching no intervals. The intervals are specified by `dfs.metrics.percentiles.intervals`. |
 | `StorageBlockReport`*num*`s(50/75/90/95/99)thPercentileLatency` | The 50/75/90/95/99th percentile of storage block report latency in milliseconds. Percentile measurement is off by default, by watching no intervals. The intervals are specified by `dfs.metrics.percentiles.intervals`. |
+| `EditLogTailTimeNumOps` | Total number of times the standby NameNode tailed the edit log |
+| `EditLogTailTimeAvgTime` | Average time (in milliseconds) spent by standby NameNode in tailing edit log |
+| `EditLogTailTime`*num*`s(50/75/90/95/99)thPercentileLatency` | The 50/75/90/95/99th percentile of time spent in tailing edit logs by standby NameNode, in milliseconds. Percentile measurement is off by default, by watching no intervals. The intervals are specified by `dfs.metrics.percentiles.intervals`. |
+| `EditLogFetchTimeNumOps` | Total number of times the standby NameNode fetched remote edit streams from journal nodes |
+| `EditLogFetchTimeAvgTime` | Average time (in milliseconds) spent by standby NameNode in fetching remote edit streams from journal nodes |
+| `EditLogFetchTime`*num*`s(50/75/90/95/99)thPercentileLatency` | The 50/75/90/95/99th percentile of time spent in fetching edit streams from journal nodes by standby NameNode, in milliseconds. Percentile measurement is off by default, by watching no intervals. The intervals are specified by `dfs.metrics.percentiles.intervals`. |
+| `NumEditLogLoadedNumOps` | Total number of times edits were loaded by standby NameNode |
+| `NumEditLogLoadedAvgCount` | Average number of edits loaded by standby NameNode in each edit log tailing |
+| `NumEditLogLoaded`*num*`s(50/75/90/95/99)thPercentileCount` | The 50/75/90/95/99th percentile of number of edits loaded by standby NameNode in each edit log tailing. Percentile measurement is off by default, by watching no intervals. The intervals are specified by `dfs.metrics.percentiles.intervals`. |
+| `EditLogTailIntervalNumOps` | Total number of intervals between edit log tailings by standby NameNode |
+| `EditLogTailIntervalAvgTime` | Average time of intervals between edit log tailings by standby NameNode in milliseconds |
+| `EditLogTailInterval`*num*`s(50/75/90/95/99)thPercentileLatency` | The 50/75/90/95/99th percentile of time between edit log tailings by standby NameNode, in milliseconds. Percentile measurement is off by default, by watching no intervals. The intervals are specified by `dfs.metrics.percentiles.intervals`. |
 
 FSNamesystem
 

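As the rows above note, the percentile variants stay off until sampling intervals are configured through dfs.metrics.percentiles.intervals. A hedged example of enabling them in hdfs-site.xml; the 60/300/3600-second intervals are arbitrary illustration values, not recommendations:

<property>
  <name>dfs.metrics.percentiles.intervals</name>
  <!-- Comma-separated rollover intervals, in seconds, for percentile metrics. -->
  <value>60,300,3600</value>
</property>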
http://git-wip-us.apache.org/repos/asf/hadoop/blob/8e7548d3/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java
 

[26/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.min.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.min.js
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.min.js
new file mode 100644
index 000..9bcd2fc
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.min.js
@@ -0,0 +1,7 @@
+/*!
+ * Bootstrap v3.3.7 (http://getbootstrap.com)
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under the MIT license
+ */
+if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires 
jQuery");+function(a){"use strict";var b=a.fn.jquery.split(" 
")[0].split(".");if(b[0]<2&[1]<9||1==b[0]&&9==b[1]&[2]<1||b[0]>3)throw new 
Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but 
lower than version 4")}(jQuery),+function(a){"use strict";function b(){var 
a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd
 otransitionend",transition:"transitionend"};for(var c in b)if(void 
0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var
 c=!1,d=this;a(this).one("bsTransitionEnd",function(){c=!0});var 
e=function(){c||a(d).trigger(a.support.transition.end)};return 
setTimeout(e,b),this},a(function(){a.support.transition=b(),a.support.transition&&(a.event.special.bsTransitionEnd={bindType:a.support.transition.end,delegateType:a.support.transition.end,handle:function(b){if(a(b
 .target).is(this))return 
b.handleObj.handler.apply(this,arguments)}})})}(jQuery),+function(a){"use 
strict";function b(b){return this.each(function(){var 
c=a(this),e=c.data("bs.alert");e||c.data("bs.alert",e=new 
d(this)),"string"==typeof b&[b].call(c)})}var 
c='[data-dismiss="alert"]',d=function(b){a(b).on("click",c,this.close)};d.VERSION="3.3.7",d.TRANSITION_DURATION=150,d.prototype.close=function(b){function
 c(){g.detach().trigger("closed.bs.alert").remove()}var 
e=a(this),f=e.attr("data-target");f||(f=e.attr("href"),f=f&(/.*(?=#[^\s]*$)/,""));var
 
g=a("#"===f?[]:f);b&(),g.length||(g=e.closest(".alert")),g.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(g.removeClass("in"),a.support.transition&("fade")?g.one("bsTransitionEnd",c).emulateTransitionEnd(d.TRANSITION_DURATION):c())};var
 
e=a.fn.alert;a.fn.alert=b,a.fn.alert.Constructor=d,a.fn.alert.noConflict=function(){return
 a.fn.alert=e,this},a(document).on("click.bs.alert.data-api",c
 ,d.prototype.close)}(jQuery),+function(a){"use strict";function b(b){return 
this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof 
b&e||d.data("bs.button",e=new 
c(this,f)),"toggle"==b?e.toggle():b&(b)})}var 
c=function(b,d){this.$element=a(b),this.options=a.extend({},c.DEFAULTS,d),this.isLoading=!1};c.VERSION="3.3.7",c.DEFAULTS={loadingText:"loading..."},c.prototype.setState=function(b){var
 
c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",null==f.resetText&("resetText",d[e]()),setTimeout(a.proxy(function(){d[e](null==f[b]?this.options[b]:f[b]),"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c).prop(c,!0)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c).prop(c,!1))},this),0)},c.prototype.toggle=function(){var
 a=!0,b=this.$element.closest('[data-toggle="buttons"]');if(b.length){var 
c=this.$element.find("input");"radio"==c.prop("type")?(c.prop("checked")&&(a=!1),b.find(".active").removeCla
 
ss("active"),this.$element.addClass("active")):"checkbox"==c.prop("type")&&(c.prop("checked")!==this.$element.hasClass("active")&&(a=!1),this.$element.toggleClass("active")),c.prop("checked",this.$element.hasClass("active")),a&("change")}else
 
this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var
 
d=a.fn.button;a.fn.button=b,a.fn.button.Constructor=c,a.fn.button.noConflict=function(){return
 
a.fn.button=d,this},a(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(c){var
 
d=a(c.target).closest(".btn");b.call(d,"toggle"),a(c.target).is('input[type="radio"],
 
input[type="checkbox"]')||(c.preventDefault(),d.is("input,button")?d.trigger("focus"):d.find("input:visible,button:visible").first().trigger("focus"))}).on("focus.bs.button.data-api
 
blur.bs.button.data-api",'[data-toggle^="button"]',function(b){a(b.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(b.type))})}(jQuery),+function(a){"us
 e strict";function b(b){return this.each(function(){var 
d=a(this),e=d.data("bs.carousel"),f=a.extend({},c.DEFAULTS,d.data(),"object"==typeof
 b&),g="string"==typeof b?b:f.slide;e||d.data("bs.carousel",e=new 
c(this,f)),"number"==typeof 
b?e.to(b):g?e[g]():f.interval&().cycle()})}var 

[15/50] [abbrv] hadoop git commit: HADOOP-15527. Improve delay check for stopping processes. Contributed by Vinod Kumar Vavilapalli

2018-06-14 Thread stevel
HADOOP-15527. Improve delay check for stopping processes. Contributed by Vinod Kumar Vavilapalli


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/108da853
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/108da853
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/108da853

Branch: refs/heads/HADOOP-15407
Commit: 108da85320d65e37fe835de65866b818e5420587
Parents: 5670e89
Author: Eric Yang 
Authored: Tue Jun 12 20:40:32 2018 -0400
Committer: Eric Yang 
Committed: Tue Jun 12 20:40:32 2018 -0400

--
 .../src/main/bin/hadoop-functions.sh| 34 +++-
 .../src/test/scripts/hadoop_stop_daemon.bats| 24 +-
 2 files changed, 56 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/108da853/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index bee1430..cbedd972 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -2040,6 +2040,35 @@ function hadoop_start_secure_daemon_wrapper
   return 0
 }
 
+## @description  Wait till process dies or till timeout
+## @audience private
+## @stabilityevolving
+## @parampid
+## @paramtimeout
+function wait_process_to_die_or_timeout
+{
+  local pid=$1
+  local timeout=$2
+
+  # Normalize timeout
+  # Round up or down
+  timeout=$(printf "%.0f\n" "${timeout}")
+  if [[ ${timeout} -lt 1  ]]; then
+# minimum 1 second
+timeout=1
+  fi
+
+  # Wait to see if it's still alive
+  for (( i=0; i < "${timeout}"; i++ ))
+  do
+if kill -0 "${pid}" > /dev/null 2>&1; then
+  sleep 1
+else
+  break
+fi
+  done
+}
+
 ## @description  Stop the non-privileged `command` daemon with that
 ## @description  that is running at `pidfile`.
 ## @audience public
@@ -2060,11 +2089,14 @@ function hadoop_stop_daemon
 pid=$(cat "$pidfile")
 
 kill "${pid}" >/dev/null 2>&1
-sleep "${HADOOP_STOP_TIMEOUT}"
+
+wait_process_to_die_or_timeout "${pid}" "${HADOOP_STOP_TIMEOUT}"
+
 if kill -0 "${pid}" > /dev/null 2>&1; then
   hadoop_error "WARNING: ${cmd} did not stop gracefully after 
${HADOOP_STOP_TIMEOUT} seconds: Trying to kill with kill -9"
   kill -9 "${pid}" >/dev/null 2>&1
 fi
+wait_process_to_die_or_timeout "${pid}" "${HADOOP_STOP_TIMEOUT}"
 if ps -p "${pid}" > /dev/null 2>&1; then
   hadoop_error "ERROR: Unable to kill ${pid}"
 else

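The helper added above replaces a fixed sleep of HADOOP_STOP_TIMEOUT seconds with a poll that returns as soon as the process exits, and the same bounded wait is reused after the kill -9 escalation. A stripped-down, stand-alone sketch of that stop sequence, not the actual hadoop-functions.sh code:

#!/usr/bin/env bash
# Minimal sketch: graceful stop, bounded wait, then escalate.

wait_for_exit_or_timeout() {
  local pid=$1 timeout=$2
  for ((i = 0; i < timeout; i++)); do
    kill -0 "${pid}" >/dev/null 2>&1 || return 0   # process already gone
    sleep 1
  done
  return 1
}

stop_daemon_sketch() {
  local pid=$1 timeout=${2:-5}
  kill "${pid}" >/dev/null 2>&1
  if ! wait_for_exit_or_timeout "${pid}" "${timeout}"; then
    echo "WARNING: ${pid} did not stop gracefully; sending kill -9" >&2
    kill -9 "${pid}" >/dev/null 2>&1
    wait_for_exit_or_timeout "${pid}" "${timeout}"
  fi
}

# Example usage against a throwaway background process.
sleep 600 &
stop_daemon_sketch $!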
http://git-wip-us.apache.org/repos/asf/hadoop/blob/108da853/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_stop_daemon.bats
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_stop_daemon.bats 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_stop_daemon.bats
index 023d01c..1483807 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_stop_daemon.bats
+++ 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_stop_daemon.bats
@@ -15,7 +15,7 @@
 
 load hadoop-functions_test_helper
 
-@test "hadoop_stop_daemon" {
+@test "hadoop_stop_daemon_changing_pid" {
   old_pid=12345
   new_pid=54321
   HADOOP_STOP_TIMEOUT=3
@@ -29,3 +29,25 @@ load hadoop-functions_test_helper
   [ -f pidfile ]
   [ "$(cat pidfile)" = "${new_pid}" ]
 }
+
+@test "hadoop_stop_daemon_force_kill" {
+
+  HADOOP_STOP_TIMEOUT=4
+
+  # Run the following in a sub-shell so that its termination doesn't affect 
the test
+  (sh ${TESTBINDIR}/process_with_sigterm_trap.sh ${TMP}/pidfile &)
+
+  # Wait for the process to go into tight loop
+  sleep 1
+
+  [ -f ${TMP}/pidfile ]
+  pid=$(cat "${TMP}/pidfile")
+
+  run hadoop_stop_daemon my_command ${TMP}/pidfile 2>&1
+
+  # The process should no longer be alive
+  ! kill -0 ${pid} > /dev/null 2>&1
+
+  # The PID file should be gone
+  [ ! -f ${TMP}/pidfile ]
+}





[22/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-ozone/pom.xml
--
diff --git a/hadoop-ozone/pom.xml b/hadoop-ozone/pom.xml
index 5d57e10..cffef14 100644
--- a/hadoop-ozone/pom.xml
+++ b/hadoop-ozone/pom.xml
@@ -143,8 +143,8 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd;>
 src/main/webapps/router/robots.txt
 src/contrib/**
 src/site/resources/images/*
-webapps/static/bootstrap-3.0.2/**
-webapps/static/jquery-1.10.2.min.js
+webapps/static/bootstrap-3.3.7/**
+webapps/static/jquery-3.3.1.min.js
 webapps/static/jquery.dataTables.min.js
 webapps/static/nvd3-1.8.5.min.css.map
 webapps/static/nvd3-1.8.5.min.js





[28/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.ttf
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.ttf
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.ttf
new file mode 100644
index 000..1413fc6
Binary files /dev/null and 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.ttf
 differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff
new file mode 100644
index 000..9e61285
Binary files /dev/null and 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff
 differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff2
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff2
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff2
new file mode 100644
index 000..64539b5
Binary files /dev/null and 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.woff2
 differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap-editable.min.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap-editable.min.js
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap-editable.min.js
new file mode 100644
index 000..539d6c1
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap-editable.min.js
@@ -0,0 +1,7 @@
+/*! X-editable - v1.5.0
+* In-place editing with Twitter Bootstrap, jQuery UI or pure jQuery
+* http://github.com/vitalets/x-editable
+* Copyright (c) 2013 Vitaliy Potapov; Licensed MIT */
+[minified X-editable 1.5.0 source added by this commit; the content is mangled and truncated in the archive and is elided here]

[09/50] [abbrv] hadoop git commit: HADOOP-15529. ContainerLaunch#testInvalidEnvVariableSubstitutionType is not supported in Windows. Contributed by Giovanni Matteo Fumarola.

2018-06-14 Thread stevel
HADOOP-15529. ContainerLaunch#testInvalidEnvVariableSubstitutionType is not 
supported in Windows. Contributed by Giovanni Matteo Fumarola.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6e756e8a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6e756e8a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6e756e8a

Branch: refs/heads/HADOOP-15407
Commit: 6e756e8a620e4d6dc3192986679060c52063489b
Parents: 10d0e4b
Author: Inigo Goiri 
Authored: Tue Jun 12 10:24:34 2018 -0700
Committer: Inigo Goiri 
Committed: Tue Jun 12 10:24:34 2018 -0700

--
 .../containermanager/launcher/TestContainerLaunch.java  | 12 ++--
 1 file changed, 10 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6e756e8a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
index da9bc89..ebdceea 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
@@ -1920,7 +1920,11 @@ public class TestContainerLaunch extends 
BaseContainerManagerTest {
   public void testInvalidEnvVariableSubstitutionType1() throws IOException {
 Map env = new HashMap();
 // invalid env
-env.put("testVar", "version${foo.version}");
+String invalidEnv = "version${foo.version}";
+if (Shell.WINDOWS) {
+  invalidEnv = "version%foo%<>^&|=:version%";
+}
+env.put("testVar", invalidEnv);
 validateShellExecutorForDifferentEnvs(env);
   }
 
@@ -1931,7 +1935,11 @@ public class TestContainerLaunch extends 
BaseContainerManagerTest {
   public void testInvalidEnvVariableSubstitutionType2() throws IOException {
 Map env = new HashMap();
 // invalid env
-env.put("testPath", "/abc:/${foo.path}:/$bar");
+String invalidEnv = "/abc:/${foo.path}:/$bar";
+if (Shell.WINDOWS) {
+  invalidEnv = "/abc:/%foo%<>^&|=:path%:/%bar%";
+}
+env.put("testPath", invalidEnv);
 validateShellExecutorForDifferentEnvs(env);
   }
 





[10/50] [abbrv] hadoop git commit: YARN-8422. TestAMSimulator failing with NPE. Contributed by Giovanni Matteo Fumarola.

2018-06-14 Thread stevel
YARN-8422. TestAMSimulator failing with NPE. Contributed by Giovanni Matteo 
Fumarola.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c3548159
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c3548159
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c3548159

Branch: refs/heads/HADOOP-15407
Commit: c35481594ffc372e3f846b0c8ebc2ff9e36ffdb0
Parents: 6e756e8
Author: Inigo Goiri 
Authored: Tue Jun 12 10:59:50 2018 -0700
Committer: Inigo Goiri 
Committed: Tue Jun 12 10:59:50 2018 -0700

--
 .../hadoop/yarn/sls/appmaster/TestAMSimulator.java | 13 -
 1 file changed, 8 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c3548159/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java
--
diff --git 
a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java
 
b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java
index bfc7d0c..bc8ea70 100644
--- 
a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java
+++ 
b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java
@@ -49,8 +49,8 @@ public class TestAMSimulator {
   private YarnConfiguration conf;
   private Path metricOutputDir;
 
-  private Class slsScheduler;
-  private Class scheduler;
+  private Class slsScheduler;
+  private Class scheduler;
 
   @Parameterized.Parameters
   public static Collection params() {
@@ -60,7 +60,7 @@ public class TestAMSimulator {
 });
   }
 
-  public TestAMSimulator(Class slsScheduler, Class scheduler) {
+  public TestAMSimulator(Class slsScheduler, Class scheduler) {
 this.slsScheduler = slsScheduler;
 this.scheduler = scheduler;
   }
@@ -115,7 +115,8 @@ public class TestAMSimulator {
   }
 
   private void createMetricOutputDir() {
-Path testDir = Paths.get(System.getProperty("test.build.data"));
+Path testDir =
+Paths.get(System.getProperty("test.build.data", "target/test-dir"));
 try {
   metricOutputDir = Files.createTempDirectory(testDir, "output");
 } catch (IOException e) {
@@ -153,7 +154,9 @@ public class TestAMSimulator {
 
   @After
   public void tearDown() {
-rm.stop();
+if (rm != null) {
+  rm.stop();
+}
 
 deleteMetricOutputDir();
   }


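The fix above has two defensive pieces: fall back to a local directory when the test.build.data property is unset, and guard tearDown against a setup that never created the resource. A small, hypothetical JUnit 4 sketch of both habits; the names are illustrative, not the SLS test code:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class DefensiveTearDownSketch {
  private Path outputDir;
  private AutoCloseable service;   // stand-in for a service that setUp may fail to start

  @Before
  public void setUp() throws IOException {
    // Fall back to a local default when the build does not set the property.
    Path base = Paths.get(System.getProperty("test.build.data", "target/test-dir"));
    Files.createDirectories(base);
    outputDir = Files.createTempDirectory(base, "output");
  }

  @Test
  public void testSomething() {
    // Test body omitted; the setup/teardown behaviour is the point of the sketch.
  }

  @After
  public void tearDown() throws Exception {
    if (service != null) {        // guard against a failed or partial setUp
      service.close();
    }
    if (outputDir != null) {
      Files.deleteIfExists(outputDir);
    }
  }
}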



[50/50] [abbrv] hadoop git commit: YARN-8155. Improve ATSv2 client logging in RM and NM publisher. Contributed by Abhishek Modi.

2018-06-14 Thread stevel
YARN-8155. Improve ATSv2 client logging in RM and NM publisher. Contributed by 
Abhishek Modi.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9119b3cf
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9119b3cf
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9119b3cf

Branch: refs/heads/HADOOP-15407
Commit: 9119b3cf8f883aa2d5df534afc0c50249fed03c6
Parents: ddd09d5
Author: Rohith Sharma K S 
Authored: Thu Jun 14 12:38:10 2018 +0530
Committer: Rohith Sharma K S 
Committed: Thu Jun 14 12:38:10 2018 +0530

--
 .../timelineservice/NMTimelinePublisher.java| 42 +---
 .../metrics/TimelineServiceV2Publisher.java |  8 +++-
 .../collector/TimelineCollectorWebService.java  | 19 ++---
 3 files changed, 56 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9119b3cf/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
index f451726..cbf3e5e 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
@@ -190,9 +190,20 @@ public class NMTimelinePublisher extends CompositeService {
   LOG.error("Seems like client has been removed before the container"
   + " metric could be published for " + 
container.getContainerId());
 }
-  } catch (IOException | YarnException e) {
+  } catch (IOException e) {
 LOG.error("Failed to publish Container metrics for container "
-+ container.getContainerId(), e);
++ container.getContainerId());
+if (LOG.isDebugEnabled()) {
+  LOG.debug("Failed to publish Container metrics for container "
+  + container.getContainerId(), e);
+}
+  } catch (YarnException e) {
+LOG.error("Failed to publish Container metrics for container "
++ container.getContainerId(), e.getMessage());
+if (LOG.isDebugEnabled()) {
+  LOG.debug("Failed to publish Container metrics for container "
+  + container.getContainerId(), e);
+}
   }
 }
   }
@@ -284,9 +295,20 @@ public class NMTimelinePublisher extends CompositeService {
 LOG.error("Seems like client has been removed before the event could 
be"
 + " published for " + container.getContainerId());
   }
-} catch (IOException | YarnException e) {
+} catch (IOException e) {
+  LOG.error("Failed to publish Container metrics for container "
+  + container.getContainerId());
+  if (LOG.isDebugEnabled()) {
+LOG.debug("Failed to publish Container metrics for container "
++ container.getContainerId(), e);
+  }
+} catch (YarnException e) {
   LOG.error("Failed to publish Container metrics for container "
-  + container.getContainerId(), e);
+  + container.getContainerId(), e.getMessage());
+  if (LOG.isDebugEnabled()) {
+LOG.debug("Failed to publish Container metrics for container "
++ container.getContainerId(), e);
+  }
 }
   }
 
@@ -315,8 +337,16 @@ public class NMTimelinePublisher extends CompositeService {
 LOG.error("Seems like client has been removed before the entity "
 + "could be published for " + entity);
   }
-} catch (Exception e) {
-  LOG.error("Error when publishing entity " + entity, e);
+} catch (IOException e) {
+  LOG.error("Error when publishing entity " + entity);
+  if (LOG.isDebugEnabled()) {
+LOG.debug("Error when publishing entity " + entity, e);
+  }
+} catch (YarnException e) {
+  LOG.error("Error when publishing entity " + entity, e.getMessage());
+  if (LOG.isDebugEnabled()) {
+LOG.debug("Error when publishing entity " + entity, e);
+  }
 }
   }
 

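The logging change above follows one pattern throughout: a concise message at ERROR and the full stack trace only at DEBUG, so routine publish failures do not flood the NM log. A generic sketch of that pattern with SLF4J; the logger, entity id, and simulated exception are placeholders:

import java.io.IOException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class QuietErrorLoggingSketch {
  private static final Logger LOG =
      LoggerFactory.getLogger(QuietErrorLoggingSketch.class);

  static void publish(String entityId) {
    try {
      throw new IOException("collector unreachable");  // stand-in failure
    } catch (IOException e) {
      // One concise line at ERROR; the stack trace only when DEBUG is enabled.
      LOG.error("Failed to publish entity {}: {}", entityId, e.getMessage());
      if (LOG.isDebugEnabled()) {
        LOG.debug("Failed to publish entity " + entityId, e);
      }
    }
  }

  public static void main(String[] args) {
    publish("container_e01_000001");
  }
}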

[33/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css.map
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css.map
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css.map
new file mode 100644
index 000..f010c82
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.css.map
@@ -0,0 +1 @@
+{"version":3,"sources":["bootstrap.css","less/normalize.less","less/print.less","less/glyphicons.less","less/scaffolding.less","less/mixins/vendor-prefixes.less","less/mixins/tab-focus.less","less/mixins/image.less","less/type.less","less/mixins/text-emphasis.less","less/mixins/background-variant.less","less/mixins/text-overflow.less","less/code.less","less/grid.less","less/mixins/grid.less","less/mixins/grid-framework.less","less/tables.less","less/mixins/table-row.less","less/forms.less","less/mixins/forms.less","less/buttons.less","less/mixins/buttons.less","less/mixins/opacity.less","less/component-animations.less","less/dropdowns.less","less/mixins/nav-divider.less","less/mixins/reset-filter.less","less/button-groups.less","less/mixins/border-radius.less","less/input-groups.less","less/navs.less","less/navbar.less","less/mixins/nav-vertical-align.less","less/utilities.less","less/breadcrumbs.less","less/pagination.less","less/mixins/pagination.less","less/pager.less","less/labe
 
ls.less","less/mixins/labels.less","less/badges.less","less/jumbotron.less","less/thumbnails.less","less/alerts.less","less/mixins/alerts.less","less/progress-bars.less","less/mixins/gradients.less","less/mixins/progress-bar.less","less/media.less","less/list-group.less","less/mixins/list-group.less","less/panels.less","less/mixins/panels.less","less/responsive-embed.less","less/wells.less","less/close.less","less/modals.less","less/tooltip.less","less/mixins/reset-text.less","less/popovers.less","less/carousel.less","less/mixins/clearfix.less","less/mixins/center-block.less","less/mixins/hide-text.less","less/responsive-utilities.less","less/mixins/responsive-visibility.less"],"names":[],"mappings":"GAIG;AACH,4EAA4E;ACG5E;EACE,wBAAA;EACA,2BAAA;EACA,+BAAA;CDDD;ACQD;EACE,UAAA;CDND;ACmBD;EAaE,eAAA;CDjBD;ACyBDEAIE,sBAAA;EACA,yBAAA;CDvBD;AC+BD;EACE,cAAA;EACA,UAAA;CD7BD;ACqCD;;EAEE,cAAA;CDnCD;AC6CD;EACE,8BAAA;CD3CD;ACmDD;;EAEE,WAAA;CDjDD;AC2DD;EACE,0BAAA;CDzDD;ACg
 
ED;;EAEE,kBAAA;CD9DD;ACqED;EACE,mBAAA;CDnED;AC2ED;EACE,eAAA;EACA,iBAAA;CDzED;ACgFD;EACE,iBAAA;EACA,YAAA;CD9ED;ACqFD;EACE,eAAA;CDnFD;AC0FD;;EAEE,eAAA;EACA,eAAA;EACA,mBAAA;EACA,yBAAA;CDxFD;AC2FD;EACE,YAAA;CDzFD;AC4FD;EACE,gBAAA;CD1FD;ACoGD;EACE,UAAA;CDlGD;ACyGD;EACE,iBAAA;CDvGD;ACiHD;EACE,iBAAA;CD/GD;ACsHD;EACE,gCAAA;KAAA,6BAAA;UAAA,wBAAA;EACA,UAAA;CDpHD;AC2HD;EACE,eAAA;CDzHD;ACgIDEAIE,kCAAA;EACA,eAAA;CD9HD;ACgJD;EAKE,eAAA;EACA,cAAA;EACA,UAAA;CD9ID;ACqJD;EACE,kBAAA;CDnJD;AC6JD;;EAEE,qBAAA;CD3JD;ACsKDEAIE,2BAAA;EACA,gBAAA;CDpKD;AC2KD;;EAEE,gBAAA;CDzKD;ACgLD;;EAEE,UAAA;EACA,WAAA;CD9KD;ACsLD;EACE,oBAAA;CDpLD;AC+LD;;EAEE,+BAAA;KAAA,4BAAA;UAAA,uBAAA;EACA,WAAA;CD7LD;ACsMD;;EAEE,aAAA;CDpMD;AC4MD;EACE,8BAAA;EACA,gCAAA;KAAA,6BAAA;UAAA,wBAAA;CD1MD;ACmND;;EAEE,yBAAA;CDjND;ACwND;EACE,0BAAA;EACA,cAAA;EACA,+BAAA;CDtND;AC8ND;EACE,UAAA;EACA,WAAA;CD5ND;ACmOD;EACE,eAAA;CDjOD;ACyOD;EACE,kBAAA;CDvOD;ACiPD;EACE,0BAAA;EACA,kBAAA;CD/OD;ACkPD;;EAEE,WAAA;CDhPD;AACD,qFAAqF;AElFrF;EA7FI;;;IAGI,mCAAA
 
;IACA,uBAAA;IACA,oCAAA;YAAA,4BAAA;IACA,6BAAA;GFkLL;EE/KC;;IAEI,2BAAA;GFiLL;EE9KC;IACI,6BAAA;GFgLL;EE7KC;IACI,8BAAA;GF+KL;EE1KC;;IAEI,YAAA;GF4KL;EEzKC;;IAEI,uBAAA;IACA,yBAAA;GF2KL;EExKC;IACI,4BAAA;GF0KL;EEvKC;;IAEI,yBAAA;GFyKL;EEtKC;IACI,2BAAA;GFwKL;EErKC;;;IAGI,WAAA;IACA,UAAA;GFuKL;EEpKC;;IAEI,wBAAA;GFsKL;EEhKC;IACI,cAAA;GFkKL;EEhKC;;IAGQ,kCAAA;GFiKT;EE9JC;IACI,uBAAA;GFgKL;EE7JC;IACI,qCAAA;GF+JL;EEhKC;;IAKQ,kCAAA;GF+JT;EE5JC;;IAGQ,kCAAA;GF6JT;CACF;AGnPD;EACE,oCAAA;EACA,sDAAA;EACA,gYAAA;CHqPD;AG7OD;EACE,mBAAA;EACA,SAAA;EACA,sBAAA;EACA,oCAAA;EACA,mBAAA;EACA,oBAAA;EACA,eAAA;EACA,oCAAA;EACA,mCAAA;CH+OD;AG3OmC;EAAW,iBAAA;CH8O9C;AG7OmC;EAAW,iBAAA;CHgP9C;AG9OmC;;EAAW,iBAAA;CHkP9C;AGjPmC;EAAW,iBAAA;CHoP9C;AGnPmC;EAAW,iBAAA;CHsP9C;AGrPmC;EAAW,iBAAA;CHwP9C;AGvPmC;EAAW,iBAAA;CH0P9C;AGzPmC;EAAW,iBAAA;CH4P9C;AG3PmC;EAAW,iBAAA;CH8P9C;AG7PmC;EAAW,iBAAA;CHgQ9C;AG/PmC;EAAW,iBAAA;CHkQ9C;AGjQmC;EAAW,iBAAA;CHoQ9C;AGnQmC;EAAW,iBAAA;CHsQ9C;AGrQmC;EAAW,iBAAA;CHwQ9C;AGvQmC;EAAW,iBAAA;CH0Q9C;AGzQmC;EAAW,iBA
 

[39/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.eot
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.eot
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.eot
deleted file mode 100644
index 423bd5d..000
Binary files 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.eot
 and /dev/null differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.svg
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.svg
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.svg
deleted file mode 100644
index 4469488..000
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.svg
+++ /dev/null
@@ -1,229 +0,0 @@
-
-http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd; >
-http://www.w3.org/2000/svg;>
-  (remaining SVG glyph definitions not shown: element markup was stripped by the mail archive)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.ttf
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.ttf
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.ttf
deleted file mode 100644
index a498ef4..000
Binary files 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.ttf
 and /dev/null differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.woff
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.woff
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.woff
deleted file mode 100644
index d83c539..000
Binary files 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/fonts/glyphicons-halflings-regular.woff
 and /dev/null differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/clear.png
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/clear.png
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/clear.png
deleted file mode 100644
index 580b52a..000
Binary files 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/clear.png
 and /dev/null differ

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/loading.gif
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/loading.gif
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/loading.gif
deleted file mode 100644
index 5b33f7e..000
Binary files 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.0.2/img/loading.gif
 and /dev/null differ


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[25/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-1.10.2.min.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-1.10.2.min.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-1.10.2.min.js
deleted file mode 100644
index da41706..000
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/jquery-1.10.2.min.js
+++ /dev/null
@@ -1,6 +0,0 @@
-/*! jQuery v1.10.2 | (c) 2005, 2013 jQuery Foundation, Inc. | 
jquery.org/license
-//@ sourceMappingURL=jquery-1.10.2.min.map
-*/
-(function(e,t){var n,r,i=typeof 
t,o=e.location,a=e.document,s=a.documentElement,l=e.jQuery,u=e.$,c={},p=[],f="1.10.2",d=p.concat,h=p.push,g=p.slice,m=p.indexOf,y=c.toString,v=c.hasOwnProperty,b=f.trim,x=function(e,t){return
 new 
x.fn.init(e,t,r)},w=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,T=/\S+/g,C=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,N=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,k=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,E=/^[\],:{}\s]*$/,S=/(?:^|:|,)(?:\s*\[)+/g,A=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,j=/"[^"\\\r\n]*"|true|false|null|-?(?:\d+\.|)\d+(?:[eE][+-]?\d+|)/g,D=/^-ms-/,L=/-([\da-z])/gi,H=function(e,t){return
 
t.toUpperCase()},q=function(e){(a.addEventListener||"load"===e.type||"complete"===a.readyState)&&(_(),x.ready())},_=function(){a.addEventListener?(a.removeEventListener("DOMContentLoaded",q,!1),e.removeEventListener("load",q,!1)):(a.detachEvent("onreadystatechange",q),e.detachEvent("onload",q))};x.fn=x.prototype={jquery:f,constructor:x,init:function(e,n,r){var
 i,o;if(!e)return this;
 if("string"==typeof 
e){if(i="<"===e.charAt(0)&&">"===e.charAt(e.length-1)&>=3?[null,e,null]:N.exec(e),!i||!i[1]&)return!n||n.jquery?(n||r).find(e):this.constructor(n).find(e);if(i[1]){if(n=n
 instanceof 
x?n[0]:n,x.merge(this,x.parseHTML(i[1],n&?n.ownerDocument||n:a,!0)),k.test(i[1])&(n))for(i
 in n)x.isFunction(this[i])?this[i](n[i]):this.attr(i,n[i]);return 
this}if(o=a.getElementById(i[2]),o&){if(o.id!==i[2])return 
r.find(e);this.length=1,this[0]=o}return 
this.context=a,this.selector=e,this}return 
e.nodeType?(this.context=this[0]=e,this.length=1,this):x.isFunction(e)?r.ready(e):(e.selector!==t&&(this.selector=e.selector,this.context=e.context),x.makeArray(e,this))},selector:"",length:0,toArray:function(){return
 g.call(this)},get:function(e){return 
null==e?this.toArray():0>e?this[this.length+e]:this[e]},pushStack:function(e){var
 t=x.merge(this.constructor(),e);return 
t.prevObject=this,t.context=this.context,t},each:function(e,t){retur
 n x.each(this,e,t)},ready:function(e){return 
x.ready.promise().done(e),this},slice:function(){return 
this.pushStack(g.apply(this,arguments))},first:function(){return 
this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var 
t=this.length,n=+e+(0>e?t:0);return 
this.pushStack(n>=0&>n?[this[n]]:[])},map:function(e){return 
this.pushStack(x.map(this,function(t,n){return 
e.call(t,n,t)}))},end:function(){return 
this.prevObject||this.constructor(null)},push:h,sort:[].sort,splice:[].splice},x.fn.init.prototype=x.fn,x.extend=x.fn.extend=function(){var
 
e,n,r,i,o,a,s=arguments[0]||{},l=1,u=arguments.length,c=!1;for("boolean"==typeof
 s&&(c=s,s=arguments[1]||{},l=2),"object"==typeof 
s||x.isFunction(s)||(s={}),u===l&&(s=this,--l);u>l;l++)if(null!=(o=arguments[l]))for(i
 in 
o)e=s[i],r=o[i],s!==r&&(c&&&(x.isPlainObject(r)||(n=x.isArray(r)))?(n?(n=!1,a=e&(e)?e:[]):a=e&(e)?e:{},s[i]=x.extend(c,a,r)):r!==t&&(s[i]=r));return
 s},x.extend({expando:"jQuery"+(f+Math.ran
 dom()).replace(/\D/g,""),noConflict:function(t){return 
e.$===x&&(e.$=u),t&===x&&(e.jQuery=l),x},isReady:!1,readyWait:1,holdReady:function(e){e?x.readyWait++:x.ready(!0)},ready:function(e){if(e===!0?!--x.readyWait:!x.isReady){if(!a.body)return
 
setTimeout(x.ready);x.isReady=!0,e!==!0&&--x.readyWait>0||(n.resolveWith(a,[x]),x.fn.trigger&(a).trigger("ready").off("ready"))}},isFunction:function(e){return"function"===x.type(e)},isArray:Array.isArray||function(e){return"array"===x.type(e)},isWindow:function(e){return
 
null!=e&==e.window},isNumeric:function(e){return!isNaN(parseFloat(e))&(e)},type:function(e){return
 null==e?e+"":"object"==typeof e||"function"==typeof 
e?c[y.call(e)]||"object":typeof e},isPlainObject:function(e){var 
n;if(!e||"object"!==x.type(e)||e.nodeType||x.isWindow(e))return!1;try{if(e.constructor&&!v.call(e,"constructor")&&!v.call(e.constructor.prototype,"isPrototypeOf"))return!1}catch(r){return!1}if(x.support.ownLast)for(n
 in e)return v.call(e,n);fo
 r(n in e);return n===t||v.call(e,n)},isEmptyObject:function(e){var t;for(t in 
e)return!1;return!0},error:function(e){throw 
Error(e)},parseHTML:function(e,t,n){if(!e||"string"!=typeof e)return 
null;"boolean"==typeof t&&(n=t,t=!1),t=t||a;var r=k.exec(e),i=!n&&[];return 

[16/50] [abbrv] hadoop git commit: YARN-8394. Improve data locality documentation for Capacity Scheduler. Contributed by Weiwei Yang.

2018-06-14 Thread stevel
YARN-8394. Improve data locality documentation for Capacity Scheduler. 
Contributed by Weiwei Yang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/29024a62
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/29024a62
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/29024a62

Branch: refs/heads/HADOOP-15407
Commit: 29024a62038c297f11e8992601f2522c7da7
Parents: 108da85
Author: Weiwei Yang 
Authored: Wed Jun 13 09:28:05 2018 +0800
Committer: Weiwei Yang 
Committed: Wed Jun 13 09:28:05 2018 +0800

--
 .../conf/capacity-scheduler.xml | 2 ++
 .../hadoop-yarn-site/src/site/markdown/CapacityScheduler.md | 5 +
 2 files changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/29024a62/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/conf/capacity-scheduler.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/conf/capacity-scheduler.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/conf/capacity-scheduler.xml
index aca6c7c..62654ca 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/conf/capacity-scheduler.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/conf/capacity-scheduler.xml
@@ -149,6 +149,8 @@
   attempts to schedule rack-local containers.
   When setting this parameter, the size of the cluster should be taken 
into account.
   We use 40 as the default value, which is approximately the number of 
nodes in one rack.
+  Note that if this value is -1, the locality constraint in the container
+  request will be ignored, which disables delay scheduling.
 
   
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/29024a62/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/CapacityScheduler.md
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/CapacityScheduler.md
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/CapacityScheduler.md
index ef6381a..5be32d4 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/CapacityScheduler.md
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/CapacityScheduler.md
@@ -400,9 +400,14 @@ list of current scheduling edit policies as a comma 
separated string in `yarn.re
 
   * Data Locality
 
+Capacity Scheduler leverages `Delay Scheduling` to honor task locality 
constraints. There are 3 levels of locality constraint: node-local, rack-local 
and off-switch. The scheduler counts the number of missed scheduling 
opportunities while the locality constraint cannot be satisfied, and waits for 
this count to reach a threshold before relaxing the constraint to the next 
level. The thresholds can be configured with the following properties:
+
 | Property | Description |
 |: |: |
| `yarn.scheduler.capacity.node-locality-delay` | Number of missed scheduling 
opportunities after which the CapacityScheduler attempts to schedule rack-local 
containers. Typically, this should be set to the number of nodes in the 
cluster. By default it is set to 40, which is approximately the number of nodes 
in one rack. A positive integer value is expected. |
+| `yarn.scheduler.capacity.rack-locality-additional-delay` | Number of 
additional missed scheduling opportunities, over the node-locality-delay ones, 
after which the CapacityScheduler attempts to schedule off-switch containers. 
By default this value is set to -1; in this case, the number of missed 
opportunities for assigning off-switch containers is calculated based on the 
formula `L * C / N`, where `L` is the number of locations (nodes or racks) 
specified in the resource request, `C` is the number of requested containers, 
and `N` is the size of the cluster. |
+
+Note that this feature should be disabled if YARN is deployed separately from 
the file system, as locality is then meaningless. This can be done by setting 
`yarn.scheduler.capacity.node-locality-delay` to `-1`; in this case, the 
request's locality constraint is ignored.
 
   * Container Allocation per NodeManager Heartbeat
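
As a quick companion to the two properties documented above, the sketch below
simply sets and reads them through a plain Hadoop Configuration; the key names
come from the table, while the class and chosen values are illustrative only.

import org.apache.hadoop.conf.Configuration;

public class LocalityDelayExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();

    // Missed opportunities tolerated before relaxing node-local requests
    // to rack-local; 40 is roughly one rack of nodes.
    conf.setInt("yarn.scheduler.capacity.node-locality-delay", 40);

    // -1 keeps the L * C / N heuristic for relaxing rack-local requests
    // to off-switch; a positive value fixes the additional delay instead.
    conf.setInt("yarn.scheduler.capacity.rack-locality-additional-delay", -1);

    System.out.println(
        conf.getInt("yarn.scheduler.capacity.node-locality-delay", 40));
  }
}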
 


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[14/50] [abbrv] hadoop git commit: MAPREDUCE-7101. Add config parameter to allow JHS to always scan user dir irrespective of modTime. (Thomas Marquardt via asuresh)

2018-06-14 Thread stevel
MAPREDUCE-7101. Add config parameter to allow JHS to always scan user dir 
irrespective of modTime. (Thomas Marquardt via asuresh)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5670e89b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5670e89b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5670e89b

Branch: refs/heads/HADOOP-15407
Commit: 5670e89b2ec69ab71e32dcd5acbd3a57ca6abea5
Parents: aeaf9fe
Author: Arun Suresh 
Authored: Tue Jun 12 15:36:52 2018 -0700
Committer: Arun Suresh 
Committed: Tue Jun 12 15:36:52 2018 -0700

--
 .../hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java   | 9 +++--
 .../src/main/resources/mapred-default.xml   | 9 +
 .../apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java   | 8 +++-
 3 files changed, 23 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5670e89b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
index 1cadf84..9e964e1 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
@@ -61,8 +61,13 @@ public class JHAdminConfig {
 MR_HISTORY_PREFIX + "cleaner.interval-ms";
   public static final long DEFAULT_MR_HISTORY_CLEANER_INTERVAL_MS = 
 1 * 24 * 60 * 60 * 1000l; //1 day
-  
-  
+
+  /** Always scan user dir, irrespective of dir modification time.*/
+  public static final String MR_HISTORY_ALWAYS_SCAN_USER_DIR =
+  MR_HISTORY_PREFIX + "always-scan-user-dir";
+  public static final boolean DEFAULT_MR_HISTORY_ALWAYS_SCAN_USER_DIR =
+  false;
+
   /** The number of threads to handle client API requests.*/
   public static final String MR_HISTORY_CLIENT_THREAD_COUNT = 
 MR_HISTORY_PREFIX + "client.thread-count";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5670e89b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
index dcb312c..9f33d65 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
@@ -1775,6 +1775,15 @@
 
 
 
+  mapreduce.jobhistory.always-scan-user-dir
+  false
+  Some Cloud FileSystems do not currently update the
+  modification time of directories. To support these filesystems, this
+  configuration value should be set to 'true'.
+  
+
+
+
   mapreduce.jobhistory.done-dir
   ${yarn.app.mapreduce.am.staging-dir}/history/done
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5670e89b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
index a07ca26..7fe99a2 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
@@ -324,7 +324,13 @@ public class HistoryFileManager extends AbstractService {
   // so we need to have additional check.
   // Note: modTime (X second Y millisecond) could be casted to X second or
   // X+1 second.
-  if (modTime != newModTime
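
A minimal sketch of how a flag such as mapreduce.jobhistory.always-scan-user-dir
can gate the modification-time shortcut; the class and method below are
assumptions for illustration, not the committed HistoryFileManager change.

import org.apache.hadoop.conf.Configuration;

public class UserDirScanPolicy {
  private final boolean alwaysScan;

  public UserDirScanPolicy(Configuration conf) {
    // Some cloud file systems never update directory mtimes, so the history
    // server can be told to rescan user directories unconditionally.
    this.alwaysScan = conf.getBoolean(
        "mapreduce.jobhistory.always-scan-user-dir", false);
  }

  /** True when the cached directory listing should be refreshed. */
  boolean shouldScan(long cachedModTime, long newModTime) {
    return alwaysScan || cachedModTime != newModTime;
  }
}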

[46/50] [abbrv] hadoop git commit: YARN-8411. Restart stopped system service during RM start. Contributed by Billie Rinaldi

2018-06-14 Thread stevel
YARN-8411.  Restart stopped system service during RM start.
Contributed by Billie Rinaldi


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/69b05968
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/69b05968
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/69b05968

Branch: refs/heads/HADOOP-15407
Commit: 69b05968974994c6e22d6562a67b9392d1700094
Parents: 7566e0e
Author: Eric Yang 
Authored: Wed Jun 13 19:05:52 2018 -0400
Committer: Eric Yang 
Committed: Wed Jun 13 19:05:52 2018 -0400

--
 .../client/SystemServiceManagerImpl.java| 29 --
 .../hadoop/yarn/service/webapp/ApiServer.java   |  3 +-
 .../hadoop/yarn/service/ServiceClientTest.java  |  4 +-
 .../client/TestSystemServiceManagerImpl.java| 40 +---
 .../yarn/service/client/ServiceClient.java  | 14 +--
 5 files changed, 72 insertions(+), 18 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/69b05968/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/SystemServiceManagerImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/SystemServiceManagerImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/SystemServiceManagerImpl.java
index f9cfa92..08ad1b6 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/SystemServiceManagerImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/SystemServiceManagerImpl.java
@@ -29,7 +29,9 @@ import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.server.service.SystemServiceManager;
 import org.apache.hadoop.yarn.service.api.records.Service;
 import org.apache.hadoop.yarn.service.api.records.ServiceState;
+import org.apache.hadoop.yarn.service.conf.SliderExitCodes;
 import org.apache.hadoop.yarn.service.conf.YarnServiceConf;
+import org.apache.hadoop.yarn.service.exceptions.SliderException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -228,12 +230,31 @@ public class SystemServiceManagerImpl extends 
AbstractService
   userUgi.doAs(new PrivilegedExceptionAction<ApplicationId>() {
 @Override public ApplicationId run()
 throws IOException, YarnException {
-  ApplicationId applicationId = 
serviceClient.actionCreate(service);
-  return applicationId;
+  boolean tryStart = true;
+  try {
+serviceClient.actionBuild(service);
+  } catch (Exception e) {
+if (e instanceof SliderException && ((SliderException) e)
+.getExitCode() == SliderExitCodes.EXIT_INSTANCE_EXISTS) {
+  LOG.info("Service {} already exists, will attempt to start " 
+
+  "service", service.getName());
+} else {
+  tryStart = false;
+  LOG.info("Got exception saving {}, will not attempt to " +
+  "start service", service.getName(), e);
+}
+  }
+  if (tryStart) {
+return serviceClient.actionStartAndGetId(service.getName());
+  } else {
+return null;
+  }
 }
   });
-  LOG.info("Service {} submitted with Application ID: {}",
-  service.getName(), applicationId);
+  if (applicationId != null) {
+LOG.info("Service {} submitted with Application ID: {}",
+service.getName(), applicationId);
+  }
 }
   }
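
The control flow of the hunk above, build the service and fall back to starting
it when the instance already exists, can be sketched as follows; the client
interface and exception type are stand-ins, not the real ServiceClient API.

public class BuildThenStartSketch {

  /** Stand-in for the service client used above. */
  interface Client {
    void build(String service) throws Exception;
    String start(String service) throws Exception;
  }

  /** Hypothetical marker for "instance already exists". */
  static class InstanceExistsException extends Exception {
  }

  static String submit(Client client, String service) throws Exception {
    boolean tryStart = true;
    try {
      client.build(service);                 // persist the service definition
    } catch (InstanceExistsException e) {
      // Saved during a previous RM start; fall through and just start it.
    } catch (Exception e) {
      tryStart = false;                      // saving failed, do not start
    }
    return tryStart ? client.start(service) : null;
  }
}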
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/69b05968/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/webapp/ApiServer.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/webapp/ApiServer.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/webapp/ApiServer.java
index 578273c..82fadae 100644
--- 

[18/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-3.3.1.min.js
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-3.3.1.min.js
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-3.3.1.min.js
new file mode 100644
index 000..4d9b3a2
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-3.3.1.min.js
@@ -0,0 +1,2 @@
+/*! jQuery v3.3.1 | (c) JS Foundation and other contributors | 
jquery.org/license */
+!function(e,t){"use strict";"object"==typeof module&&"object"==typeof 
module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw
 new Error("jQuery requires a window with a document");return 
t(e)}:t(e)}("undefined"!=typeof window?window:this,function(e,t){"use 
strict";var 
n=[],r=e.document,i=Object.getPrototypeOf,o=n.slice,a=n.concat,s=n.push,u=n.indexOf,l={},c=l.toString,f=l.hasOwnProperty,p=f.toString,d=p.call(Object),h={},g=function
 e(t){return"function"==typeof t&&"number"!=typeof t.nodeType},y=function 
e(t){return null!=t&===t.window},v={type:!0,src:!0,noModule:!0};function 
m(e,t,n){var i,o=(t=t||r).createElement("script");if(o.text=e,n)for(i in 
v)n[i]&&(o[i]=n[i]);t.head.appendChild(o).parentNode.removeChild(o)}function 
x(e){return null==e?e+"":"object"==typeof e||"function"==typeof 
e?l[c.call(e)]||"object":typeof e}var b="3.3.1",w=function(e,t){return new 
w.fn.init(e,t)},T=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;w.fn=w.prototype={jquery:"3.3.1",constructo
 r:w,length:0,toArray:function(){return o.call(this)},get:function(e){return 
null==e?o.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var 
t=w.merge(this.constructor(),e);return 
t.prevObject=this,t},each:function(e){return 
w.each(this,e)},map:function(e){return 
this.pushStack(w.map(this,function(t,n){return 
e.call(t,n,t)}))},slice:function(){return 
this.pushStack(o.apply(this,arguments))},first:function(){return 
this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var 
t=this.length,n=+e+(e<0?t:0);return 
this.pushStack(n>=0&0& in e)}var E=function(e){var 
t,n,r,i,o,a,s,u,l,c,f,p,d,h,g,y,v,m,x,b="sizzle"+1*new 
Date,w=e.document,T=0,C=0,E=ae(),k=ae(),S=ae(),D=function(e,t){return 
e===t&&(f=!0),0},N={}.hasOwnProperty,A=[],j=A.pop,q=A.push,L=A.push,H=A.slice,O=function(e,t){for(var
 n=0,r=e.length;n+~]|"+M+")"+M+"*"),z=new 
RegExp("="+M+"*([^\\]'\"]*?)"+M+"*\\]","g"),X=new RegExp(W),U=new 
RegExp("^"+R+"$"),V={ID:new RegExp("^#("+R+")"),CLASS:new 
RegExp("^\\.("+R+")"),TAG:new RegExp("^("+R+"|[*])"),ATTR:new 
RegExp("^"+I),PSEUDO:new RegExp("^"+W),CHILD:new 
RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new
 RegExp("^(?:"+P+")$","i"),needsContext:new 
RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|n
 
th|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},G=/^(?:input|select|textarea|button)$/i,Y=/^h\d$/i,Q=/^[^{]+\{\s*\[native
 \w/,J=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,K=/[+~]/,Z=new 
RegExp("([\\da-f]{1,6}"+M+"?|("+M+")|.)","ig"),ee=function(e,t,n){var 
r="0x"+t-65536;return 
r!==r||n?t:r<0?String.fromCharCode(r+65536):String.fromCharCode(r>>10|55296,1023|56320)},te=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\u\w-]/g,ne=function(e,t){return
 t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" 
":"\\"+e},re=function(){p()},ie=me(function(e){return!0===e.disabled&&("form"in 
e||"label"in 
e)},{dir:"parentNode",next:"legend"});try{L.apply(A=H.call(w.childNodes),w.childNodes),A[w.childNodes.length].nodeType}catch(e){L={apply:A.length?function(e,t){q.apply(e,H.call(t))}:function(e,t){var
 n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function oe(e,t,r,i){var 
o,s,l,c,f,h,v,m=t&,T=t?t.nodeType:9;if(r=r||[],"string"!=typeof 
e
 ||!e||1!==T&&9!==T&&11!==T)return 
r;if(!i&&((t?t.ownerDocument||t:w)!==d&(t),t=t||d,g)){if(11!==T&&(f=J.exec(e)))if(o=f[1]){if(9===T){if(!(l=t.getElementById(o)))return
 r;if(l.id===o)return r.push(l),r}else 
if(m&&(l=m.getElementById(o))&(t,l)&===o)return 
r.push(l),r}else{if(f[2])return 
L.apply(r,t.getElementsByTagName(e)),r;if((o=f[3])&&)return
 L.apply(r,t.getElementsByClassName(o)),r}if(n.qsa&&!S[e+" 
"]&&(!y||!y.test(e))){if(1!==T)m=t,v=e;else 
if("object"!==t.nodeName.toLowerCase()){(c=t.getAttribute("id"))?c=c.replace(te,ne):t.setAttribute("id",c=b),s=(h=a(e)).length;while(s--)h[s]="#"+c+"
 "+ve(h[s]);v=h.join(","),m=K.test(e)&(t.parentNode)||t}if(v)try{return 
L.apply(r,m.querySelectorAll(v)),r}catch(e){}finally{c===b&("id")}}}return
 u(e.replace(B,"$1"),t,r,i)}function ae(){var e=[];function t(n,i){return 
e.push(n+" ")>r.cacheLength& t[e.shift()],t[n+" 

[27/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.js
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.js
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.js
new file mode 100644
index 000..8a2e99a
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/js/bootstrap.js
@@ -0,0 +1,2377 @@
+/*!
+ * Bootstrap v3.3.7 (http://getbootstrap.com)
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under the MIT license
+ */
+
+if (typeof jQuery === 'undefined') {
+  throw new Error('Bootstrap\'s JavaScript requires jQuery')
+}
+
++function ($) {
+  'use strict';
+  var version = $.fn.jquery.split(' ')[0].split('.')
+  if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 
9 && version[2] < 1) || (version[0] > 3)) {
+throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or 
higher, but lower than version 4')
+  }
+}(jQuery);
+
+/* 
+ * Bootstrap: transition.js v3.3.7
+ * http://getbootstrap.com/javascript/#transitions
+ * 
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ *  */
+
+
++function ($) {
+  'use strict';
+
+  // CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/)
+  // 
+
+  function transitionEnd() {
+var el = document.createElement('bootstrap')
+
+var transEndEventNames = {
+  WebkitTransition : 'webkitTransitionEnd',
+  MozTransition: 'transitionend',
+  OTransition  : 'oTransitionEnd otransitionend',
+  transition   : 'transitionend'
+}
+
+for (var name in transEndEventNames) {
+  if (el.style[name] !== undefined) {
+return { end: transEndEventNames[name] }
+  }
+}
+
+return false // explicit for ie8 (  ._.)
+  }
+
+  // http://blog.alexmaccaw.com/css-transitions
+  $.fn.emulateTransitionEnd = function (duration) {
+var called = false
+var $el = this
+$(this).one('bsTransitionEnd', function () { called = true })
+var callback = function () { if (!called) 
$($el).trigger($.support.transition.end) }
+setTimeout(callback, duration)
+return this
+  }
+
+  $(function () {
+$.support.transition = transitionEnd()
+
+if (!$.support.transition) return
+
+$.event.special.bsTransitionEnd = {
+  bindType: $.support.transition.end,
+  delegateType: $.support.transition.end,
+  handle: function (e) {
+if ($(e.target).is(this)) return e.handleObj.handler.apply(this, 
arguments)
+  }
+}
+  })
+
+}(jQuery);
+
+/* 
+ * Bootstrap: alert.js v3.3.7
+ * http://getbootstrap.com/javascript/#alerts
+ * 
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ *  */
+
+
++function ($) {
+  'use strict';
+
+  // ALERT CLASS DEFINITION
+  // ==
+
+  var dismiss = '[data-dismiss="alert"]'
+  var Alert   = function (el) {
+$(el).on('click', dismiss, this.close)
+  }
+
+  Alert.VERSION = '3.3.7'
+
+  Alert.TRANSITION_DURATION = 150
+
+  Alert.prototype.close = function (e) {
+var $this= $(this)
+var selector = $this.attr('data-target')
+
+if (!selector) {
+  selector = $this.attr('href')
+  selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip 
for ie7
+}
+
+var $parent = $(selector === '#' ? [] : selector)
+
+if (e) e.preventDefault()
+
+if (!$parent.length) {
+  $parent = $this.closest('.alert')
+}
+
+$parent.trigger(e = $.Event('close.bs.alert'))
+
+if (e.isDefaultPrevented()) return
+
+$parent.removeClass('in')
+
+function removeElement() {
+  // detach from parent, fire event then clean up data
+  $parent.detach().trigger('closed.bs.alert').remove()
+}
+
+$.support.transition && $parent.hasClass('fade') ?
+  $parent
+.one('bsTransitionEnd', removeElement)
+.emulateTransitionEnd(Alert.TRANSITION_DURATION) :
+  removeElement()
+  }
+
+
+  // ALERT PLUGIN DEFINITION
+  // ===
+
+  function Plugin(option) {
+return this.each(function () {
+  var $this = $(this)
+  var data  = $this.data('bs.alert')
+
+  if (!data) $this.data('bs.alert', (data = new Alert(this)))
+  

[29/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.svg
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.svg
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.svg
new file mode 100644
index 000..f155876
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/fonts/glyphicons-halflings-regular.svg
@@ -0,0 +1,288 @@
+
+http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd; >
+http://www.w3.org/2000/svg;>
+  (SVG glyph definitions not shown: element markup was stripped by the mail archive)

[35/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.min.css.map
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.min.css.map
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.min.css.map
new file mode 100644
index 000..94813e9
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap-theme.min.css.map
@@ -0,0 +1 @@
+{"version":3,"sources":["less/theme.less","less/mixins/vendor-prefixes.less","less/mixins/gradients.less","less/mixins/reset-filter.less"],"names":[],"mappings":"AAmBA,YAAA,aAAA,UAAA,aAAA,aAAA,aAME,YAAA,EAAA,KAAA,EAAA,eC2CA,mBAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,iBDvCR,mBAAA,mBAAA,oBAAA,oBAAA,iBAAA,iBAAA,oBAAA,oBAAA,oBAAA,oBAAA,oBAAA,oBCsCA,mBAAA,MAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,MAAA,EAAA,IAAA,IAAA,iBDlCR,qBAAA,sBAAA,sBAAA,uBAAA,mBAAA,oBAAA,sBAAA,uBAAA,sBAAA,uBAAA,sBAAA,uBAAA,+BAAA,gCAAA,6BAAA,gCAAA,gCAAA,gCCiCA,mBAAA,KACQ,WAAA,KDlDV,mBAAA,oBAAA,iBAAA,oBAAA,oBAAA,oBAuBI,YAAA,KAyCF,YAAA,YAEE,iBAAA,KAKJ,aErEI,YAAA,EAAA,IAAA,EAAA,KACA,iBAAA,iDACA,iBAAA,4CAAA,iBAAA,qEAEA,iBAAA,+CCnBF,OAAA,+GH4CA,OAAA,0DACA,kBAAA,SAuC2C,aAAA,QAA2B,aAAA,KArCtE,mBAAA,mBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,oBAAA,oBAEE,iBAAA,QACA,aAAA,QAMA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,uBAAA,8BAAA,6BAAA,8BAAA,6BAAA,6BAAA,gCAAA,uCAAA,sCAAA,uCAAA,sCAAA,sCAME,iBAAA,Q
 
ACA,iBAAA,KAgBN,aEtEI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4CA,kBAAA,SACA,aAAA,QAEA,mBAAA,mBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,oBAAA,oBAEE,iBAAA,QACA,aAAA,QAMA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,uBAAA,8BAAA,6BAAA,8BAAA,6BAAA,6BAAA,gCAAA,uCAAA,sCAAA,uCAAA,sCAAA,sCAME,iBAAA,QACA,iBAAA,KAiBN,aEvEI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4CA,kBAAA,SACA,aAAA,QAEA,mBAAA,mBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,oBAAA,oBAEE,iBAAA,QACA,aAAA,QAMA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,uBAAA,8BAAA,6BAAA,8BAAA,6BAAA,6BAAA,gCAAA,uCAAA,sCAAA,uCAAA,sCAAA,sCAME,iBAAA,QACA,iBAAA,KAkBN,UExEI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4CA,kBAAA,SACA,aAAA,QAEA,gBAAA,gBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,iBAAA,iBAEE,iBAAA,QACA,aAAA,QAMA,mBAAA,0BAAA,yBAAA,0BAAA,yBAAA,yBAAA,oBAAA,2BAAA,0BAAA,2BAAA,0BAAA,0BAAA,6BAAA,oCAAA,mCAAA,oCAAA,mCAAA,mCAME,iBAAA,QACA,iBAAA,KAmBN,aEzEI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4
 
CA,kBAAA,SACA,aAAA,QAEA,mBAAA,mBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,oBAAA,oBAEE,iBAAA,QACA,aAAA,QAMA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,uBAAA,8BAAA,6BAAA,8BAAA,6BAAA,6BAAA,gCAAA,uCAAA,sCAAA,uCAAA,sCAAA,sCAME,iBAAA,QACA,iBAAA,KAoBN,YE1EI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDAEA,OAAA,+GCnBF,OAAA,0DH4CA,kBAAA,SACA,aAAA,QAEA,kBAAA,kBAEE,iBAAA,QACA,oBAAA,EAAA,MAGF,mBAAA,mBAEE,iBAAA,QACA,aAAA,QAMA,qBAAA,4BAAA,2BAAA,4BAAA,2BAAA,2BAAA,sBAAA,6BAAA,4BAAA,6BAAA,4BAAA,4BAAA,+BAAA,sCAAA,qCAAA,sCAAA,qCAAA,qCAME,iBAAA,QACA,iBAAA,KA2BN,eAAA,WClCE,mBAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,EAAA,IAAA,IAAA,iBD2CV,0BAAA,0BE3FI,iBAAA,QACA,iBAAA,oDACA,iBAAA,+CAAA,iBAAA,wEACA,iBAAA,kDACA,OAAA,+GF0FF,kBAAA,SAEF,yBAAA,+BAAA,+BEhGI,iBAAA,QACA,iBAAA,oDACA,iBAAA,+CAAA,iBAAA,wEACA,iBAAA,kDACA,OAAA,+GFgGF,kBAAA,SASF,gBE7GI,iBAAA,iDACA,iBAAA,4CACA,iBAAA,qEAAA,iBAAA,+CACA,OAAA,+GACA,OAAA,0DCnBF,kBAAA,SH+HA,cAAA,ICjEA,mBAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,iBD6DV,
 
sCAAA,oCE7GI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SD2CF,mBAAA,MAAA,EAAA,IAAA,IAAA,iBACQ,WAAA,MAAA,EAAA,IAAA,IAAA,iBD0EV,cAAA,iBAEE,YAAA,EAAA,IAAA,EAAA,sBAIF,gBEhII,iBAAA,iDACA,iBAAA,4CACA,iBAAA,qEAAA,iBAAA,+CACA,OAAA,+GACA,OAAA,0DCnBF,kBAAA,SHkJA,cAAA,IAHF,sCAAA,oCEhII,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SD2CF,mBAAA,MAAA,EAAA,IAAA,IAAA,gBACQ,WAAA,MAAA,EAAA,IAAA,IAAA,gBDgFV,8BAAA,iCAYI,YAAA,EAAA,KAAA,EAAA,gBAKJ,qBAAA,kBAAA,mBAGE,cAAA,EAqBF,yBAfI,mDAAA,yDAAA,yDAGE,MAAA,KE7JF,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,UFqKJ,OACE,YAAA,EAAA,IAAA,EAAA,qBC3HA,mBAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,gBACQ,WAAA,MAAA,EAAA,IAAA,EAAA,sBAAA,EAAA,IAAA,IAAA,gBDsIV,eEtLI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF8KF,aAAA,QAKF,YEvLI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF8KF,aAAA,QAMF,eExLI,iBAAA,oDACA,iBAAA,+CACA,iBAAA,wEAAA,iBAAA,kDACA,OAAA,+GACA,kBAAA,SF8KF,aA
 

[07/50] [abbrv] hadoop git commit: YARN-6931. Make the aggregation interval in AppLevelTimelineCollector configurable. (Abhishek Modi via Haibo Chen)

2018-06-14 Thread stevel
YARN-6931. Make the aggregation interval in AppLevelTimelineCollector 
configurable. (Abhishek Modi via Haibo Chen)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/24a89825
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/24a89825
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/24a89825

Branch: refs/heads/HADOOP-15407
Commit: 24a89825f0cbc92b0a462152bc765e2195edd5a1
Parents: e9ea902
Author: Haibo Chen 
Authored: Tue Jun 12 10:03:07 2018 -0700
Committer: Haibo Chen 
Committed: Tue Jun 12 10:03:34 2018 -0700

--
 .../apache/hadoop/yarn/conf/YarnConfiguration.java|  9 +
 .../src/main/resources/yarn-default.xml   |  9 +
 .../collector/AppLevelTimelineCollectorWithAgg.java   | 14 +-
 3 files changed, 27 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/24a89825/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index f7f82f8..5292a25 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -2665,6 +2665,15 @@ public class YarnConfiguration extends Configuration {
   TIMELINE_SERVICE_PREFIX + "read.authentication.enabled";
 
   /**
+   * The name for setting that controls how often in-memory app level
+   * aggregation is kicked off in timeline collector.
+   */
+  public static final String TIMELINE_SERVICE_AGGREGATION_INTERVAL_SECS =
+  TIMELINE_SERVICE_PREFIX + "app-aggregation-interval-secs";
+
+  public static final int
+  DEFAULT_TIMELINE_SERVICE_AGGREGATION_INTERVAL_SECS = 15;
+  /**
* The default setting for authentication checks for reading timeline
* service v2 data.
*/
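
A small sketch of reading the new interval from configuration; the property name
and the 15 second default mirror the constants added above, while the
surrounding class is purely illustrative.

import org.apache.hadoop.conf.Configuration;

public class AggregationIntervalExample {
  static final String KEY =
      "yarn.timeline-service.app-aggregation-interval-secs";
  static final long DEFAULT_SECS = 15L;

  /** Interval, in seconds, between in-memory app-level aggregations. */
  public static long aggregationIntervalSecs(Configuration conf) {
    return conf.getLong(KEY, DEFAULT_SECS);
  }

  public static void main(String[] args) {
    System.out.println(aggregationIntervalSecs(new Configuration()));
  }
}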

http://git-wip-us.apache.org/repos/asf/hadoop/blob/24a89825/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index b0ffc48..2cc842f 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -2547,6 +2547,15 @@
 
   
 
+  The setting that controls how often in-memory app level
+  aggregation is kicked off in timeline collector.
+
+yarn.timeline-service.app-aggregation-interval-secs
+15
+  
+
+  
+
 The default hdfs location for flowrun coprocessor jar.
 
 yarn.timeline-service.hbase.coprocessor.jar.hdfs.location

http://git-wip-us.apache.org/repos/asf/hadoop/blob/24a89825/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollectorWithAgg.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollectorWithAgg.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollectorWithAgg.java
index d7f47c8..aa041a5 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollectorWithAgg.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollectorWithAgg.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import 

[32/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css
new file mode 100644
index 000..ed3905e
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/bootstrap-3.3.7/css/bootstrap.min.css
@@ -0,0 +1,6 @@
+/*!
+ * Bootstrap v3.3.7 (http://getbootstrap.com)
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ *//*! normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css 
*/html{font-family:sans-serif;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,menu,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background-color:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px
 dotted}b,strong{font-weight:700}dfn{font-style:italic}h1{margin:.67em 
0;font-size:2em}mark{color:#000;background:#ff0}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em
 
40px}hr{height:0;-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace,monospace;fo
 
nt-size:1em}button,input,optgroup,select,textarea{margin:0;font:inherit;color:inherit}button{overflow:visible}button,select{text-transform:none}button,html
 
input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer}button[disabled],html
 
input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{padding:0;border:0}input{line-height:normal}input[type=checkbox],input[type=radio]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type=number]::-webkit-inner-spin-button,input[type=number]::-webkit-outer-spin-button{height:auto}input[type=search]{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;-webkit-appearance:textfield}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}fieldset{padding:.35em
 .625em .75em;margin:0 2px;border:1px solid 
silver}legend{padding:0;border:0}textarea{overflow:
 
auto}optgroup{font-weight:700}table{border-spacing:0;border-collapse:collapse}td,th{padding:0}/*!
 Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css 
*/@media 
print{*,:after,:before{color:#000!important;text-shadow:none!important;background:0
 
0!important;-webkit-box-shadow:none!important;box-shadow:none!important}a,a:visited{text-decoration:underline}a[href]:after{content:"
 (" attr(href) ")"}abbr[title]:after{content:" (" attr(title) 
")"}a[href^="javascript:"]:after,a[href^="#"]:after{content:""}blockquote,pre{border:1px
 solid 
#999;page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}.navbar{display:none}.btn>.caret,.dropup>.btn>.caret{border-top-color:#000!important}.label{border:1px
 solid #000}.table{border-collapse:collapse!important}.table td,.table 
th{background-color:#fff!important}.table-bordered td,.table-bordered 
th{border:1px so
 lid #ddd!important}}@font-face{font-family:'Glyphicons 
Halflings';src:url(../fonts/glyphicons-halflings-regular.eot);src:url(../fonts/glyphicons-halflings-regular.eot?#iefix)
 format('embedded-opentype'),url(../fonts/glyphicons-halflings-regular.woff2) 
format('woff2'),url(../fonts/glyphicons-halflings-regular.woff) 
format('woff'),url(../fonts/glyphicons-halflings-regular.ttf) 
format('truetype'),url(../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular)
 
format('svg')}.glyphicon{position:relative;top:1px;display:inline-block;font-family:'Glyphicons
 
Halflings';font-style:normal;font-weight:400;line-height:1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.glyphicon-asterisk:before{content:"\002a"}.glyphicon-plus:before{content:"\002b"}.glyphicon-eur:before,.glyphicon-euro:before{content:"\20ac"}.glyphicon-minus:before{content:"\2212"}.glyphicon-cloud:before{content:"\2601"}.glyphicon-envelope:before{content:"\2709"}.glyphicon-pencil:before{content:"\
 

[47/50] [abbrv] hadoop git commit: YARN-8259. Improve privileged docker container liveliness checks. Contributed by Shane Kumpf

2018-06-14 Thread stevel
YARN-8259.  Improve privileged docker container liveliness checks.
Contributed by Shane Kumpf


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/22994889
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/22994889
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/22994889

Branch: refs/heads/HADOOP-15407
Commit: 22994889dc449f966fb6462a3ac3d3bbaee3ac6a
Parents: 69b0596
Author: Eric Yang 
Authored: Wed Jun 13 19:24:31 2018 -0400
Committer: Eric Yang 
Committed: Wed Jun 13 19:24:31 2018 -0400

--
 .../runtime/DockerLinuxContainerRuntime.java| 26 ++-
 .../runtime/LinuxContainerRuntimeConstants.java |  2 ++
 .../runtime/TestDockerContainerRuntime.java | 34 +---
 .../src/site/markdown/DockerContainers.md   | 15 +
 4 files changed, 49 insertions(+), 28 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/22994889/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DockerLinuxContainerRuntime.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DockerLinuxContainerRuntime.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DockerLinuxContainerRuntime.java
index e19379f..f13ba59 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DockerLinuxContainerRuntime.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DockerLinuxContainerRuntime.java
@@ -191,6 +191,7 @@ public class DockerLinuxContainerRuntime implements 
LinuxContainerRuntime {
   private static final Pattern USER_MOUNT_PATTERN = Pattern.compile(
   "(?<=^|,)([^:\\x00]+):([^:\\x00]+):([a-z]+)");
   private static final int HOST_NAME_LENGTH = 64;
+  private static final String DEFAULT_PROCFS = "/proc";
 
   @InterfaceAudience.Private
   public static final String ENV_DOCKER_CONTAINER_IMAGE =
@@ -1192,24 +1193,15 @@ public class DockerLinuxContainerRuntime implements 
LinuxContainerRuntime {
 
   private void executeLivelinessCheck(ContainerRuntimeContext ctx)
   throws ContainerExecutionException {
-PrivilegedOperation signalOp = new PrivilegedOperation(
-PrivilegedOperation.OperationType.SIGNAL_CONTAINER);
-signalOp.appendArgs(ctx.getExecutionAttribute(RUN_AS_USER),
-ctx.getExecutionAttribute(USER), Integer.toString(
-PrivilegedOperation.RunAsUserCommand.SIGNAL_CONTAINER.getValue()),
-ctx.getExecutionAttribute(PID),
-Integer.toString(ctx.getExecutionAttribute(SIGNAL).getValue()));
-signalOp.disableFailureLogging();
-try {
-  privilegedOperationExecutor.executePrivilegedOperation(null, signalOp,
-  null, ctx.getContainer().getLaunchContext().getEnvironment(), false,
-  false);
-} catch (PrivilegedOperationException e) {
-  String msg = "Liveliness check failed for PID: "
-  + ctx.getExecutionAttribute(PID)
+String procFs = ctx.getExecutionAttribute(PROCFS);
+if (procFs == null || procFs.isEmpty()) {
+  procFs = DEFAULT_PROCFS;
+}
+String pid = ctx.getExecutionAttribute(PID);
+if (!new File(procFs + File.separator + pid).exists()) {
+  String msg = "Liveliness check failed for PID: " + pid
   + ". Container may have already completed.";
-  throw new ContainerExecutionException(msg, e.getExitCode(), 
e.getOutput(),
-  e.getErrorOutput());
+  throw new ContainerExecutionException(msg);
 }
   }
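
The rewritten liveliness check reduces to testing whether /proc/<pid> exists on
the host; below is a standalone sketch under that assumption, where only the
default procfs path comes from the hunk and the class itself is illustrative.

import java.io.File;

public class ProcLivenessSketch {
  private static final String DEFAULT_PROCFS = "/proc";

  /** Returns true if a process with the given pid appears to be alive. */
  static boolean isAlive(String procFs, String pid) {
    String root =
        (procFs == null || procFs.isEmpty()) ? DEFAULT_PROCFS : procFs;
    // On Linux a live process always has a /proc/<pid> directory, so an
    // existence check avoids signalling a privileged container from an
    // unprivileged code path.
    return new File(root + File.separator + pid).exists();
  }

  public static void main(String[] args) {
    System.out.println(isAlive(null, args.length > 0 ? args[0] : "1"));
  }
}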
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/22994889/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/LinuxContainerRuntimeConstants.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/LinuxContainerRuntimeConstants.java
 

[19/50] [abbrv] hadoop git commit: HADOOP-15483. Upgrade jquery to version 3.3.1. Contributed by Lokesh Jain, Mukul Kumar Singh and Sunil Govindan.

2018-06-14 Thread stevel
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4c7c911/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-1.8.2.min.js
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-1.8.2.min.js
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-1.8.2.min.js
deleted file mode 100644
index bc3fbc8..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/jquery/jquery-1.8.2.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-/*! jQuery v1.8.2 jquery.com | jquery.org/license */
-(function(a,b){function G(a){var b=F[a]={};return 
p.each(a.split(s),function(a,c){b[c]=!0}),b}function 
J(a,c,d){if(d===b&===1){var 
e="data-"+c.replace(I,"-$1").toLowerCase();d=a.getAttribute(e);if(typeof 
d=="string"){try{d=d==="true"?!0:d==="false"?!1:d==="null"?null:+d+""===d?+d:H.test(d)?p.parseJSON(d):d}catch(f){}p.data(a,c,d)}else
 d=b}return d}function K(a){var b;for(b in 
a){if(b==="data"&(a[b]))continue;if(b!=="toJSON")return!1}return!0}function
 ba(){return!1}function bb(){return!0}function 
bh(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function bi(a,b){do 
a=a[b];while(a&!==1);return a}function 
bj(a,b,c){b=b||0;if(p.isFunction(b))return p.grep(a,function(a,d){var 
e=!!b.call(a,d,a);return e===c});if(b.nodeType)return 
p.grep(a,function(a,d){return a===b===c});if(typeof b=="string"){var 
d=p.grep(a,function(a){return a.nodeType===1});if(be.test(b))return 
p.filter(b,d,!c);b=p.filter(b,d)}return p.grep(a,function(a,d){return p.inArray(
 a,b)>=0===c})}function bk(a){var 
b=bl.split("|"),c=a.createDocumentFragment();if(c.createElement)while(b.length)c.createElement(b.pop());return
 c}function bC(a,b){return 
a.getElementsByTagName(b)[0]||a.appendChild(a.ownerDocument.createElement(b))}function
 bD(a,b){if(b.nodeType!==1||!p.hasData(a))return;var 
c,d,e,f=p._data(a),g=p._data(b,f),h=f.events;if(h){delete 
g.handle,g.events={};for(c in 
h)for(d=0,e=h[c].length;d").appendTo(e.body),c=b.css("display");b.remove();if(c==="none"||c===""){bI=e.body.appendChild(bI||p.extend(e.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!bJ||!bI.
 
createElement)bJ=(bI.contentWindow||bI.contentDocument).document,bJ.write(""),bJ.close();b=bJ.body.appendChild(bJ.createElement(a)),c=bH(b,"display"),e.body.removeChild(bI)}return
 bS[a]=c,c}function ci(a,b,c,d){var 
e;if(p.isArray(b))p.each(b,function(b,e){c||ce.test(a)?d(a,e):ci(a+"["+(typeof 
e=="object"?b:"")+"]",e,c,d)});else if(!c&(b)==="object")for(e in 
b)ci(a+"["+e+"]",b[e],c,d);else d(a,b)}function cz(a){return 
function(b,c){typeof b!="string"&&(c=b,b="*");var 
d,e,f,g=b.toLowerCase().split(s),h=0,i=g.length;if(p.isFunction(c))for(;h)[^>]*$|#([\w\-]*)$)/,v=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,w=/^[\],:{}\s]*$/,x=/(?:^|:|,)(?:\s*\[)+/g,y=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,z=/"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g,A=/^-ms-/,B=/-([\da-z])/gi,C=function(a,b){return(b+"").toUpperCase()},D=function(){e.addEventListener?(e.removeEventListener("DOMContentLoaded",D,!1),p.ready()):e.readyState==="complete"&&(e.detachEvent("onreadystatechange",D),p.ready())},E={};p.fn=p.prototype={constructor:p,init
 :function(a,c,d){var f,g,h,i;if(!a)return this;if(a.nodeType)return 
this.context=this[0]=a,this.length=1,this;if(typeof 
a=="string"){a.charAt(0)==="<"&(a.length-1)===">"&>=3?f=[null,a,null]:f=u.exec(a);if(f&&(f[1]||!c)){if(f[1])return
 c=c instanceof 
p?c[0]:c,i=c&?c.ownerDocument||c:e,a=p.parseHTML(f[1],i,!0),v.test(f[1])&(c)&(a,c,!0),p.merge(this,a);g=e.getElementById(f[2]);if(g&){if(g.id!==f[2])return
 d.find(a);this.length=1,this[0]=g}return 
this.context=e,this.selector=a,this}return!c||c.jquery?(c||d).find(a):this.constructor(c).find(a)}return
 
p.isFunction(a)?d.ready(a):(a.selector!==b&&(this.selector=a.selector,this.context=a.context),p.makeArray(a,this))},selector:"",jquery:"1.8.2",length:0,size:function(){return
 this.length},toArray:function(){return k.call(this)},get:function(a){return 
a==null?this.toArray():a<0?this[this.length+a]:this[a]},pushStack:function(a,b,c){var
 d=p.merge(this.constructor(),a);ret
 urn 
d.prevObject=this,d.context=this.context,b==="find"?d.selector=this.selector+(this.selector?"
 
":"")+c:b&&(d.selector=this.selector+"."+b+"("+c+")"),d},each:function(a,b){return
 p.each(this,a,b)},ready:function(a){return 
p.ready.promise().done(a),this},eq:function(a){return 
a=+a,a===-1?this.slice(a):this.slice(a,a+1)},first:function(){return 
this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return 
this.pushStack(k.apply(this,arguments),"slice",k.call(arguments).join(","))},map:function(a){return
 this.pushStack(p.map(this,function(b,c){return 
a.call(b,c,b)}))},end:function(){return 
