Repository: spark
Updated Branches:
  refs/heads/master e51b6eaa9 -> e4c1162b6


http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/main/resources/org/apache/spark/ui/static/jquery.mustache.js
----------------------------------------------------------------------
diff --git a/core/src/main/resources/org/apache/spark/ui/static/jquery.mustache.js b/core/src/main/resources/org/apache/spark/ui/static/jquery.mustache.js
new file mode 100644
index 0000000..14925bf
--- /dev/null
+++ b/core/src/main/resources/org/apache/spark/ui/static/jquery.mustache.js
@@ -0,0 +1,592 @@
+/*
+Shameless port of a shameless port
+@defunkt => @janl => @aq
+ 
+See http://github.com/defunkt/mustache for more info.
+*/
+ 
+;(function($) {
+
+/*!
+ * mustache.js - Logic-less {{mustache}} templates with JavaScript
+ * http://github.com/janl/mustache.js
+ */
+
+/*global define: false*/
+
+(function (root, factory) {
+  if (typeof exports === "object" && exports) {
+    factory(exports); // CommonJS
+  } else {
+    var mustache = {};
+    factory(mustache);
+    if (typeof define === "function" && define.amd) {
+      define(mustache); // AMD
+    } else {
+      root.Mustache = mustache; // <script>
+    }
+  }
+}(this, function (mustache) {
+
+  var whiteRe = /\s*/;
+  var spaceRe = /\s+/;
+  var nonSpaceRe = /\S/;
+  var eqRe = /\s*=/;
+  var curlyRe = /\s*\}/;
+  var tagRe = /#|\^|\/|>|\{|&|=|!/;
+
+  // Workaround for https://issues.apache.org/jira/browse/COUCHDB-577
+  // See https://github.com/janl/mustache.js/issues/189
+  var RegExp_test = RegExp.prototype.test;
+  function testRegExp(re, string) {
+    return RegExp_test.call(re, string);
+  }
+
+  function isWhitespace(string) {
+    return !testRegExp(nonSpaceRe, string);
+  }
+
+  var Object_toString = Object.prototype.toString;
+  var isArray = Array.isArray || function (object) {
+    return Object_toString.call(object) === '[object Array]';
+  };
+
+  function isFunction(object) {
+    return typeof object === 'function';
+  }
+
+  function escapeRegExp(string) {
+    return string.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g, "\\$&");
+  }
+
+  var entityMap = {
+    "&": "&amp;",
+    "<": "&lt;",
+    ">": "&gt;",
+    '"': '&quot;',
+    "'": '&#39;',
+    "/": '&#x2F;'
+  };
+
+  function escapeHtml(string) {
+    return String(string).replace(/[&<>"'\/]/g, function (s) {
+      return entityMap[s];
+    });
+  }
+
+  function escapeTags(tags) {
+    if (!isArray(tags) || tags.length !== 2) {
+      throw new Error('Invalid tags: ' + tags);
+    }
+
+    return [
+      new RegExp(escapeRegExp(tags[0]) + "\\s*"),
+      new RegExp("\\s*" + escapeRegExp(tags[1]))
+    ];
+  }
+
+  /**
+   * Breaks up the given `template` string into a tree of tokens. If the `tags`
+   * argument is given here it must be an array with two string values: the
+   * opening and closing tags used in the template (e.g. [ "<%", "%>" ]). Of
+   * course, the default is to use mustaches (i.e. mustache.tags).
+   *
+   * A token is an array with at least 4 elements. The first element is the
+   * mustache symbol that was used inside the tag, e.g. "#" or "&". If the tag
+   * did not contain a symbol (i.e. {{myValue}}) this element is "name". For
+   * all template text that appears outside a symbol this element is "text".
+   *
+   * The second element of a token is its "value". For mustache tags this is
+   * whatever else was inside the tag besides the opening symbol. For text tokens
+   * this is the text itself.
+   *
+   * The third and fourth elements of the token are the start and end indices
+   * in the original template of the token, respectively.
+   *
+   * Tokens that are the root node of a subtree contain two more elements: an
+   * array of tokens in the subtree and the index in the original template at which
+   * the closing tag for that section begins.
+   */
+  function parseTemplate(template, tags) {
+    tags = tags || mustache.tags;
+    template = template || '';
+
+    if (typeof tags === 'string') {
+      tags = tags.split(spaceRe);
+    }
+
+    var tagRes = escapeTags(tags);
+    var scanner = new Scanner(template);
+
+    var sections = [];     // Stack to hold section tokens
+    var tokens = [];       // Buffer to hold the tokens
+    var spaces = [];       // Indices of whitespace tokens on the current line
+    var hasTag = false;    // Is there a {{tag}} on the current line?
+    var nonSpace = false;  // Is there a non-space char on the current line?
+
+    // Strips all whitespace tokens array for the current line
+    // if there was a {{#tag}} on it and otherwise only space.
+    function stripSpace() {
+      if (hasTag && !nonSpace) {
+        while (spaces.length) {
+          delete tokens[spaces.pop()];
+        }
+      } else {
+        spaces = [];
+      }
+
+      hasTag = false;
+      nonSpace = false;
+    }
+
+    var start, type, value, chr, token, openSection;
+    while (!scanner.eos()) {
+      start = scanner.pos;
+
+      // Match any text between tags.
+      value = scanner.scanUntil(tagRes[0]);
+      if (value) {
+        for (var i = 0, len = value.length; i < len; ++i) {
+          chr = value.charAt(i);
+
+          if (isWhitespace(chr)) {
+            spaces.push(tokens.length);
+          } else {
+            nonSpace = true;
+          }
+
+          tokens.push(['text', chr, start, start + 1]);
+          start += 1;
+
+          // Check for whitespace on the current line.
+          if (chr === '\n') {
+            stripSpace();
+          }
+        }
+      }
+
+      // Match the opening tag.
+      if (!scanner.scan(tagRes[0])) break;
+      hasTag = true;
+
+      // Get the tag type.
+      type = scanner.scan(tagRe) || 'name';
+      scanner.scan(whiteRe);
+
+      // Get the tag value.
+      if (type === '=') {
+        value = scanner.scanUntil(eqRe);
+        scanner.scan(eqRe);
+        scanner.scanUntil(tagRes[1]);
+      } else if (type === '{') {
+        value = scanner.scanUntil(new RegExp('\\s*' + escapeRegExp('}' + tags[1])));
+        scanner.scan(curlyRe);
+        scanner.scanUntil(tagRes[1]);
+        type = '&';
+      } else {
+        value = scanner.scanUntil(tagRes[1]);
+      }
+
+      // Match the closing tag.
+      if (!scanner.scan(tagRes[1])) {
+        throw new Error('Unclosed tag at ' + scanner.pos);
+      }
+
+      token = [ type, value, start, scanner.pos ];
+      tokens.push(token);
+
+      if (type === '#' || type === '^') {
+        sections.push(token);
+      } else if (type === '/') {
+        // Check section nesting.
+        openSection = sections.pop();
+
+        if (!openSection) {
+          throw new Error('Unopened section "' + value + '" at ' + start);
+        }
+        if (openSection[1] !== value) {
+          throw new Error('Unclosed section "' + openSection[1] + '" at ' + start);
+        }
+      } else if (type === 'name' || type === '{' || type === '&') {
+        nonSpace = true;
+      } else if (type === '=') {
+        // Set the tags for the next time around.
+        tagRes = escapeTags(tags = value.split(spaceRe));
+      }
+    }
+
+    // Make sure there are no open sections when we're done.
+    openSection = sections.pop();
+    if (openSection) {
+      throw new Error('Unclosed section "' + openSection[1] + '" at ' + scanner.pos);
+    }
+
+    return nestTokens(squashTokens(tokens));
+  }
+
+  /**
+   * Combines the values of consecutive text tokens in the given `tokens` array
+   * to a single token.
+   */
+  function squashTokens(tokens) {
+    var squashedTokens = [];
+
+    var token, lastToken;
+    for (var i = 0, len = tokens.length; i < len; ++i) {
+      token = tokens[i];
+
+      if (token) {
+        if (token[0] === 'text' && lastToken && lastToken[0] === 'text') {
+          lastToken[1] += token[1];
+          lastToken[3] = token[3];
+        } else {
+          squashedTokens.push(token);
+          lastToken = token;
+        }
+      }
+    }
+
+    return squashedTokens;
+  }
+
+  /**
+   * Forms the given array of `tokens` into a nested tree structure where
+   * tokens that represent a section have two additional items: 1) an array of
+   * all tokens that appear in that section and 2) the index in the original
+   * template that represents the end of that section.
+   */
+  function nestTokens(tokens) {
+    var nestedTokens = [];
+    var collector = nestedTokens;
+    var sections = [];
+
+    var token, section;
+    for (var i = 0, len = tokens.length; i < len; ++i) {
+      token = tokens[i];
+
+      switch (token[0]) {
+      case '#':
+      case '^':
+        collector.push(token);
+        sections.push(token);
+        collector = token[4] = [];
+        break;
+      case '/':
+        section = sections.pop();
+        section[5] = token[2];
+        collector = sections.length > 0 ? sections[sections.length - 1][4] : nestedTokens;
+        break;
+      default:
+        collector.push(token);
+      }
+    }
+
+    return nestedTokens;
+  }
+
+  /**
+   * A simple string scanner that is used by the template parser to find
+   * tokens in template strings.
+   */
+  function Scanner(string) {
+    this.string = string;
+    this.tail = string;
+    this.pos = 0;
+  }
+
+  /**
+   * Returns `true` if the tail is empty (end of string).
+   */
+  Scanner.prototype.eos = function () {
+    return this.tail === "";
+  };
+
+  /**
+   * Tries to match the given regular expression at the current position.
+   * Returns the matched text if it can match, the empty string otherwise.
+   */
+  Scanner.prototype.scan = function (re) {
+    var match = this.tail.match(re);
+
+    if (match && match.index === 0) {
+      var string = match[0];
+      this.tail = this.tail.substring(string.length);
+      this.pos += string.length;
+      return string;
+    }
+
+    return "";
+  };
+
+  /**
+   * Skips all text until the given regular expression can be matched. Returns
+   * the skipped string, which is the entire tail if no match can be made.
+   */
+  Scanner.prototype.scanUntil = function (re) {
+    var index = this.tail.search(re), match;
+
+    switch (index) {
+    case -1:
+      match = this.tail;
+      this.tail = "";
+      break;
+    case 0:
+      match = "";
+      break;
+    default:
+      match = this.tail.substring(0, index);
+      this.tail = this.tail.substring(index);
+    }
+
+    this.pos += match.length;
+
+    return match;
+  };
+
+  /**
+   * Represents a rendering context by wrapping a view object and
+   * maintaining a reference to the parent context.
+   */
+  function Context(view, parentContext) {
+    this.view = view == null ? {} : view;
+    this.cache = { '.': this.view };
+    this.parent = parentContext;
+  }
+
+  /**
+   * Creates a new context using the given view with this context
+   * as the parent.
+   */
+  Context.prototype.push = function (view) {
+    return new Context(view, this);
+  };
+
+  /**
+   * Returns the value of the given name in this context, traversing
+   * up the context hierarchy if the value is absent in this context's view.
+   */
+  Context.prototype.lookup = function (name) {
+    var value;
+    if (name in this.cache) {
+      value = this.cache[name];
+    } else {
+      var context = this;
+
+      while (context) {
+        if (name.indexOf('.') > 0) {
+          value = context.view;
+
+          var names = name.split('.'), i = 0;
+          while (value != null && i < names.length) {
+            value = value[names[i++]];
+          }
+        } else {
+          value = context.view[name];
+        }
+
+        if (value != null) break;
+
+        context = context.parent;
+      }
+
+      this.cache[name] = value;
+    }
+
+    if (isFunction(value)) {
+      value = value.call(this.view);
+    }
+
+    return value;
+  };
+
+  /**
+   * A Writer knows how to take a stream of tokens and render them to a
+   * string, given a context. It also maintains a cache of templates to
+   * avoid the need to parse the same template twice.
+   */
+  function Writer() {
+    this.cache = {};
+  }
+
+  /**
+   * Clears all cached templates in this writer.
+   */
+  Writer.prototype.clearCache = function () {
+    this.cache = {};
+  };
+
+  /**
+   * Parses and caches the given `template` and returns the array of tokens
+   * that is generated from the parse.
+   */
+  Writer.prototype.parse = function (template, tags) {
+    var cache = this.cache;
+    var tokens = cache[template];
+
+    if (tokens == null) {
+      tokens = cache[template] = parseTemplate(template, tags);
+    }
+
+    return tokens;
+  };
+
+  /**
+   * High-level method that is used to render the given `template` with
+   * the given `view`.
+   *
+   * The optional `partials` argument may be an object that contains the
+   * names and templates of partials that are used in the template. It may
+   * also be a function that is used to load partial templates on the fly
+   * that takes a single argument: the name of the partial.
+   */
+  Writer.prototype.render = function (template, view, partials) {
+    var tokens = this.parse(template);
+    var context = (view instanceof Context) ? view : new Context(view);
+    return this.renderTokens(tokens, context, partials, template);
+  };
+
+  /**
+   * Low-level method that renders the given array of `tokens` using
+   * the given `context` and `partials`.
+   *
+   * Note: The `originalTemplate` is only ever used to extract the portion
+   * of the original template that was contained in a higher-order section.
+   * If the template doesn't use higher-order sections, this argument may
+   * be omitted.
+   */
+  Writer.prototype.renderTokens = function (tokens, context, partials, originalTemplate) {
+    var buffer = '';
+
+    // This function is used to render an arbitrary template
+    // in the current context by higher-order sections.
+    var self = this;
+    function subRender(template) {
+      return self.render(template, context, partials);
+    }
+
+    var token, value;
+    for (var i = 0, len = tokens.length; i < len; ++i) {
+      token = tokens[i];
+
+      switch (token[0]) {
+      case '#':
+        value = context.lookup(token[1]);
+        if (!value) continue;
+
+        if (isArray(value)) {
+          for (var j = 0, jlen = value.length; j < jlen; ++j) {
+            buffer += this.renderTokens(token[4], context.push(value[j]), partials, originalTemplate);
+          }
+        } else if (typeof value === 'object' || typeof value === 'string') {
+          buffer += this.renderTokens(token[4], context.push(value), partials, originalTemplate);
+        } else if (isFunction(value)) {
+          if (typeof originalTemplate !== 'string') {
+            throw new Error('Cannot use higher-order sections without the original template');
+          }
+
+          // Extract the portion of the original template that the section contains.
+          value = value.call(context.view, originalTemplate.slice(token[3], token[5]), subRender);
+
+          if (value != null) buffer += value;
+        } else {
+          buffer += this.renderTokens(token[4], context, partials, originalTemplate);
+        }
+
+        break;
+      case '^':
+        value = context.lookup(token[1]);
+
+        // Use JavaScript's definition of falsy. Include empty arrays.
+        // See https://github.com/janl/mustache.js/issues/186
+        if (!value || (isArray(value) && value.length === 0)) {
+          buffer += this.renderTokens(token[4], context, partials, originalTemplate);
+        }
+
+        break;
+      case '>':
+        if (!partials) continue;
+        value = isFunction(partials) ? partials(token[1]) : partials[token[1]];
+        if (value != null) buffer += this.renderTokens(this.parse(value), context, partials, value);
+        break;
+      case '&':
+        value = context.lookup(token[1]);
+        if (value != null) buffer += value;
+        break;
+      case 'name':
+        value = context.lookup(token[1]);
+        if (value != null) buffer += mustache.escape(value);
+        break;
+      case 'text':
+        buffer += token[1];
+        break;
+      }
+    }
+
+    return buffer;
+  };
+
+  mustache.name = "mustache.js";
+  mustache.version = "0.8.1";
+  mustache.tags = [ "{{", "}}" ];
+
+  // All high-level mustache.* functions use this writer.
+  var defaultWriter = new Writer();
+
+  /**
+   * Clears all cached templates in the default writer.
+   */
+  mustache.clearCache = function () {
+    return defaultWriter.clearCache();
+  };
+
+  /**
+   * Parses and caches the given template in the default writer and returns the
+   * array of tokens it contains. Doing this ahead of time avoids the need to
+   * parse templates on the fly as they are rendered.
+   */
+  mustache.parse = function (template, tags) {
+    return defaultWriter.parse(template, tags);
+  };
+
+  /**
+   * Renders the `template` with the given `view` and `partials` using the
+   * default writer.
+   */
+  mustache.render = function (template, view, partials) {
+    return defaultWriter.render(template, view, partials);
+  };
+
+  // This is here for backwards compatibility with 0.4.x.
+  mustache.to_html = function (template, view, partials, send) {
+    var result = mustache.render(template, view, partials);
+
+    if (isFunction(send)) {
+      send(result);
+    } else {
+      return result;
+    }
+  };
+
+  // Export the escaping function so that the user may override it.
+  // See https://github.com/janl/mustache.js/issues/244
+  mustache.escape = escapeHtml;
+
+  // Export these mainly for testing, but also for advanced usage.
+  mustache.Scanner = Scanner;
+  mustache.Context = Context;
+  mustache.Writer = Writer;
+
+}));
+  $.mustache = function (template, view, partials) {
+    return Mustache.render(template, view, partials);
+  };
+
+  $.fn.mustache = function (view, partials) {
+    return $(this).map(function (i, elm) {
+      var template = $.trim($(elm).html());
+      var output = $.mustache(template, view, partials);
+      return $(output).get();
+    });
+  };
+
+})(jQuery);
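
For reference, a minimal usage sketch of the two helpers this file adds, $.mustache and $.fn.mustache; the template string, view object, and selectors below are illustrative assumptions rather than values used anywhere in this patch.

// Render a template string directly against a view object (illustrative values).
var template = "<tr><td>{{id}}</td><td>{{name}}</td></tr>";
var view = { id: "app-20150506130300-0000", name: "Spark shell" };
$("#app-table tbody").append($.mustache(template, view));

// Or render the inner HTML of an element (e.g. a <script type="text/html">
// template block) against the same view; the plugin returns the resulting nodes.
var nodes = $("#row-template").mustache(view);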

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/main/resources/org/apache/spark/ui/static/jsonFormatter.min.css
----------------------------------------------------------------------
diff --git a/core/src/main/resources/org/apache/spark/ui/static/jsonFormatter.min.css b/core/src/main/resources/org/apache/spark/ui/static/jsonFormatter.min.css
new file mode 100755
index 0000000..37761db
--- /dev/null
+++ b/core/src/main/resources/org/apache/spark/ui/static/jsonFormatter.min.css
@@ -0,0 +1 @@
+PRE.jsonFormatter-codeContainer{margin-top:0;margin-bottom:0}PRE.jsonFormatter-codeContainer .jsonFormatter-objectBrace{color:#0a0;font-weight:bold}PRE.jsonFormatter-codeContainer .jsonFormatter-arrayBrace{color:#03f;font-weight:bold}PRE.jsonFormatter-codeContainer .jsonFormatter-propertyName{color:#c00;font-weight:bold}PRE.jsonFormatter-codeContainer .jsonFormatter-string{color:#077}PRE.jsonFormatter-codeContainer .jsonFormatter-number{color:#a0a}PRE.jsonFormatter-codeContainer .jsonFormatter-boolean{color:#00f}PRE.jsonFormatter-codeContainer .jsonFormatter-function{color:#a63;font-style:italic}PRE.jsonFormatter-codeContainer .jsonFormatter-null{color:#00f}PRE.jsonFormatter-codeContainer .jsonFormatter-coma{color:#000;font-weight:bold}PRE.jsonFormatter-codeContainer .jsonFormatter-expander{display:inline-block;width:28px;height:11px;cursor:pointer}PRE.jsonFormatter-codeContainer .jsonFormatter-expanded{background:url('data:image/gif;base64,R0lGODlhHAALAMQAAP////7++/z8/Pb29fb18PHx7e/w6/Hw6e3s5unp4+Dg3t3a0djY0dnVy9fTxNbQxtLMv8zJurDC1L+9sMK4p32buAAAAP///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEHABcALAAAAAAcAAsAAAVL4CWOZGmel1StbCWhsFgBdA1UMVwJQd8TuNypMigWD4qgsFQhWJ7PhXI5qhQKCERC0ZhSLxUFo+FwQCJeagUyobjd6aWqtXp979QQADs=') /*Expanded.gif*/ no-repeat}PRE.jsonFormatter-codeContainer .jsonFormatter-collapsed{background:url('data:image/gif;base64,R0lGODlhHAALAMQAAP////7++/z8/Pb29fb18PHx7e/w6/Hw6e3s5unp4+jm2ODg3t3a0dnVy9bQxtLMv8zJurDC1L+9sMK4p32buDMzMwAAAP///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEHABcALAAAAAAcAAsAAAVU4CWOZGmeV0StLBWhsEgBdA1QMUwJvMUTuNyJMihaBodFUFiiECxQKGMpqlSq14uVRCkUEJbEokHVZrdmrqLRsDgekDLzQoFIJni8nKlqrV5zgYIhADs=') /*Collapsed.gif*/ no-repeat}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/main/resources/org/apache/spark/ui/static/jsonFormatter.min.js
----------------------------------------------------------------------
diff --git a/core/src/main/resources/org/apache/spark/ui/static/jsonFormatter.min.js b/core/src/main/resources/org/apache/spark/ui/static/jsonFormatter.min.js
new file mode 100755
index 0000000..f2ffcec
--- /dev/null
+++ b/core/src/main/resources/org/apache/spark/ui/static/jsonFormatter.min.js
@@ -0,0 +1,2 @@
+(function($){$.fn.jsonFormatter=function(n){var _settings,u=new Date,r=new RegExp,i=function(n,t,i){for(var r="",u=0;u<n&&!i;u++)r+=_settings.tab;return t!=null&&t.length>0&&t.charAt(t.length-1)!="\n"&&(t=t+"\n"),r+t},f=function(n,t){for(var r,u,f="",i=0;i<n;i++)f+=_settings.tab;for(r=t.toString().split("\n"),u="",i=0;i<r.length;i++)u+=(i==0?"":f)+r[i]+"\n";return u},t=function(n,t,r,u,f,e){typeof n=="string"&&(n=n.split("<").join("&lt;").split(">").join("&gt;"));var o="<span class='"+e+"'>"+t+n+t+r+"<\/span>";return f&&(o=i(u,o)),o},_processObject=function(n,e,o,s,h){var c="",l=o?"<span class='jsonFormatter-coma'>,<\/span> ":"",v=typeof n,a="",y,p,k,w,b;if($.isArray(n))if(n.length==0)c+=i(e,"<span class='jsonFormatter-arrayBrace'>[ ]<\/span>"+l,h);else{for(a=_settings.collapsible?"<span class='jsonFormatter-expander jsonFormatter-expanded'><\/span><span class='jsonFormatter-collapsible'>":"",c+=i(e,"<span class='jsonFormatter-arrayBrace'>[<\/span>"+a,h),y=0;y<n.length;y++)c+=_processObject(n[y],e+1,y<n.length-1,!0,!1);a=_settings.collapsible?"<\/span>":"";c+=i(e,a+"<span class='jsonFormatter-arrayBrace'>]<\/span>"+l)}else if(v=="object")if(n==null)c+=t("null","",l,e,s,"jsonFormatter-null");else if(n.constructor==u.constructor)c+=t("new Date("+n.getTime()+") /*"+n.toLocaleString()+"*/","",l,e,s,"Date");else if(n.constructor==r.constructor)c+=t("new RegExp("+n+")","",l,e,s,"RegExp");else{p=0;for(w in n)p++;if(p==0)c+=i(e,"<span class='jsonFormatter-objectBrace'>{ }<\/span>"+l,h);else{a=_settings.collapsible?"<span class='jsonFormatter-expander jsonFormatter-expanded'><\/span><span class='jsonFormatter-collapsible'>":"";c+=i(e,"<span class='jsonFormatter-objectBrace'>{<\/span>"+a,h);k=0;for(w in n)b=_settings.quoteKeys?'"':"",c+=i(e+1,"<span class='jsonFormatter-propertyName'>"+b+w+b+"<\/span>: "+_processObject(n[w],e+1,++k<p,!1,!0));a=_settings.collapsible?"<\/span>":"";c+=i(e,a+"<span class='jsonFormatter-objectBrace'>}<\/span>"+l)}}else v=="number"?c+=t(n,"",l,e,s,"jsonFormatter-number"):v=="boolean"?c+=t(n,"",l,e,s,"jsonFormatter-boolean"):v=="function"?n.constructor==r.constructor?c+=t("new RegExp("+n+")","",l,e,s,"RegExp"):(n=f(e,n),c+=t(n,"",l,e,s,"jsonFormatter-function")):c+=v=="undefined"?t("undefined","",l,e,s,"jsonFormatter-null"):t(n.toString().split("\\").join("\\\\").split('"').join('\\"'),'"',l,e,s,"jsonFormatter-string");return c},e=function(element){var json=$(element).html(),obj,original;json.trim()==""&&(json='""');try{obj=eval("["+json+"]")}catch(exception){return}html=_processObject(obj[0],0,!1,!1,!1);original=$(element).wrapInner("<div class='jsonFormatter-original'><\/div>");_settings.hideOriginal===!0&&$(".jsonFormatter-original",original).hide();original.append("<PRE class='jsonFormatter-codeContainer'>"+html+"<\/PRE>")},o=function(){var n=$(this).next();n.length<1||($(this).hasClass("jsonFormatter-expanded")==!0?(n.hide(),$(this).removeClass("jsonFormatter-expanded").addClass("jsonFormatter-collapsed")):(n.show(),$(this).removeClass("jsonFormatter-collapsed").addClass("jsonFormatter-expanded")))};return _settings=$.extend({tab:"  ",quoteKeys:!0,collapsible:!0,hideOriginal:!0},n),this.each(function(n,t){e(t);$(t).on("click",".jsonFormatter-expander",o)})}})(jQuery);
+//# sourceMappingURL=jsonFormatter.min.js.map
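
For reference, a rough usage sketch of the jsonFormatter plugin; the target selector is an assumption, while the option names and defaults match the minified source above.

// Pretty-print the JSON text contained in an element (selector is hypothetical).
// The plugin wraps the original content and appends a collapsible,
// syntax-highlighted <PRE class='jsonFormatter-codeContainer'> block.
$("#json-output").jsonFormatter({
  tab: "  ",          // indentation used for nested values
  quoteKeys: true,    // wrap property names in double quotes
  collapsible: true,  // add expand/collapse toggles for objects and arrays
  hideOriginal: true  // hide the raw JSON once the formatted view is rendered
});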

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
index 04bad79..3fee22e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
@@ -25,12 +25,7 @@ import org.apache.spark.ui.{UIUtils, WebUIPage}
 
 private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("") {
 
-  private val pageSize = 20
-  private val plusOrMinus = 2
-
   def render(request: HttpServletRequest): Seq[Node] = {
-    val requestedPage = Option(request.getParameter("page")).getOrElse("1").toInt
-    val requestedFirst = (requestedPage - 1) * pageSize
     val requestedIncomplete =
       Option(request.getParameter("showIncomplete")).getOrElse("false").toBoolean
 
@@ -38,188 +33,46 @@ private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("")
       .filter(_.attempts.head.completed != requestedIncomplete)
     val allAppsSize = allApps.size
 
-    val actualFirst = if (requestedFirst < allAppsSize) requestedFirst else 0
-    val appsToShow = allApps.slice(actualFirst, actualFirst + pageSize)
-
-    val actualPage = (actualFirst / pageSize) + 1
-    val last = Math.min(actualFirst + pageSize, allAppsSize) - 1
-    val pageCount = allAppsSize / pageSize + (if (allAppsSize % pageSize > 0) 1 else 0)
-
-    val secondPageFromLeft = 2
-    val secondPageFromRight = pageCount - 1
-
-    val hasMultipleAttempts = appsToShow.exists(_.attempts.size > 1)
-    val appTable =
-      if (hasMultipleAttempts) {
-        // Sorting is disable here as table sort on rowspan has issues.
-        // ref. SPARK-10172
-        UIUtils.listingTable(appWithAttemptHeader, appWithAttemptRow,
-          appsToShow, sortable = false)
-      } else {
-        UIUtils.listingTable(appHeader, appRow, appsToShow)
-      }
-
     val providerConfig = parent.getProviderConfig()
     val content =
       <div class="row-fluid">
-        <div class="span12">
-          <ul class="unstyled">
-            {providerConfig.map { case (k, v) => <li><strong>{k}:</strong> {v}</li> }}
-          </ul>
-          {
-            // This displays the indices of pages that are within `plusOrMinus` pages of
-            // the current page. Regardless of where the current page is, this also links
-            // to the first and last page. If the current page +/- `plusOrMinus` is greater
-            // than the 2nd page from the first page or less than the 2nd page from the last
-            // page, `...` will be displayed.
+          <div class="span12">
+            <ul class="unstyled">
+              {providerConfig.map { case (k, v) => <li><strong>{k}:</strong> {v}</li> }}
+            </ul>
+            {
             if (allAppsSize > 0) {
-              val leftSideIndices =
-                rangeIndices(actualPage - plusOrMinus until actualPage, 1 < _, requestedIncomplete)
-              val rightSideIndices =
-                rangeIndices(actualPage + 1 to actualPage + plusOrMinus, _ < pageCount,
-                  requestedIncomplete)
-
-              <h4>
-                Showing {actualFirst + 1}-{last + 1} of {allAppsSize}
-                {if (requestedIncomplete) "(Incomplete applications)"}
-                <span style="float: right">
-                  {
-                    if (actualPage > 1) {
-                      <a href={makePageLink(actualPage - 1, requestedIncomplete)}>&lt; </a>
-                      <a href={makePageLink(1, requestedIncomplete)}>1</a>
-                    }
-                  }
-                  {if (actualPage - plusOrMinus > secondPageFromLeft) " ... "}
-                  {leftSideIndices}
-                  {actualPage}
-                  {rightSideIndices}
-                  {if (actualPage + plusOrMinus < secondPageFromRight) " ... "}
-                  {
-                    if (actualPage < pageCount) {
-                      <a href={makePageLink(pageCount, requestedIncomplete)}>{pageCount}</a>
-                      <a href={makePageLink(actualPage + 1, requestedIncomplete)}> &gt;</a>
-                    }
-                  }
-                </span>
-              </h4> ++
-              appTable
+              <script src={UIUtils.prependBaseUri("/static/dataTables.rowsGroup.js")}></script> ++
+              <div id="history-summary" class="span12 pagination"></div> ++
+              <script src={UIUtils.prependBaseUri("/static/historypage.js")}> 
</script>
             } else if (requestedIncomplete) {
               <h4>No incomplete applications found!</h4>
             } else {
               <h4>No completed applications found!</h4> ++
-              <p>Did you specify the correct logging directory?
-                Please verify your setting of <span style="font-style:italic">
-                spark.history.fs.logDirectory</span> and whether you have the permissions to
-                access it.<br /> It is also possible that your application did not run to
-                completion or did not stop the SparkContext.
-              </p>
+                <p>Did you specify the correct logging directory?
+                  Please verify your setting of <span style="font-style:italic">
+                  spark.history.fs.logDirectory</span> and whether you have the permissions to
+                  access it.<br /> It is also possible that your application did not run to
+                  completion or did not stop the SparkContext.
+                </p>
             }
-          }
-          <a href={makePageLink(actualPage, !requestedIncomplete)}>
-            {
+            }
+
+            <a href={makePageLink(!requestedIncomplete)}>
+              {
               if (requestedIncomplete) {
                 "Back to completed applications"
               } else {
                 "Show incomplete applications"
               }
-            }
-          </a>
-        </div>
+              }
+            </a>
+          </div>
       </div>
     UIUtils.basicSparkPage(content, "History Server")
   }
 
-  private val appHeader = Seq(
-    "App ID",
-    "App Name",
-    "Started",
-    "Completed",
-    "Duration",
-    "Spark User",
-    "Last Updated")
-
-  private val appWithAttemptHeader = Seq(
-    "App ID",
-    "App Name",
-    "Attempt ID",
-    "Started",
-    "Completed",
-    "Duration",
-    "Spark User",
-    "Last Updated")
-
-  private def rangeIndices(
-      range: Seq[Int],
-      condition: Int => Boolean,
-      showIncomplete: Boolean): Seq[Node] = {
-    range.filter(condition).map(nextPage =>
-      <a href={makePageLink(nextPage, showIncomplete)}> {nextPage} </a>)
-  }
-
-  private def attemptRow(
-      renderAttemptIdColumn: Boolean,
-      info: ApplicationHistoryInfo,
-      attempt: ApplicationAttemptInfo,
-      isFirst: Boolean): Seq[Node] = {
-    val uiAddress = UIUtils.prependBaseUri(HistoryServer.getAttemptURI(info.id, attempt.attemptId))
-    val startTime = UIUtils.formatDate(attempt.startTime)
-    val endTime = if (attempt.endTime > 0) UIUtils.formatDate(attempt.endTime) else "-"
-    val duration =
-      if (attempt.endTime > 0) {
-        UIUtils.formatDuration(attempt.endTime - attempt.startTime)
-      } else {
-        "-"
-      }
-    val lastUpdated = UIUtils.formatDate(attempt.lastUpdated)
-    <tr>
-      {
-        if (isFirst) {
-          if (info.attempts.size > 1 || renderAttemptIdColumn) {
-            <td rowspan={info.attempts.size.toString} style="background-color: #ffffff">
-              <a href={uiAddress}>{info.id}</a></td>
-            <td rowspan={info.attempts.size.toString} style="background-color: #ffffff">
-              {info.name}</td>
-          } else {
-            <td><a href={uiAddress}>{info.id}</a></td>
-            <td>{info.name}</td>
-          }
-        } else {
-          Nil
-        }
-      }
-      {
-        if (renderAttemptIdColumn) {
-          if (info.attempts.size > 1 && attempt.attemptId.isDefined) {
-            <td><a href={uiAddress}>{attempt.attemptId.get}</a></td>
-          } else {
-            <td>&nbsp;</td>
-          }
-        } else {
-          Nil
-        }
-      }
-      <td sorttable_customkey={attempt.startTime.toString}>{startTime}</td>
-      <td sorttable_customkey={attempt.endTime.toString}>{endTime}</td>
-      <td sorttable_customkey={(attempt.endTime - attempt.startTime).toString}>
-        {duration}</td>
-      <td>{attempt.sparkUser}</td>
-      <td sorttable_customkey={attempt.lastUpdated.toString}>{lastUpdated}</td>
-    </tr>
-  }
-
-  private def appRow(info: ApplicationHistoryInfo): Seq[Node] = {
-    attemptRow(false, info, info.attempts.head, true)
-  }
-
-  private def appWithAttemptRow(info: ApplicationHistoryInfo): Seq[Node] = {
-    attemptRow(true, info, info.attempts.head, true) ++
-      info.attempts.drop(1).flatMap(attemptRow(true, info, _, false))
-  }
-
-  private def makePageLink(linkPage: Int, showIncomplete: Boolean): String = {
-    UIUtils.prependBaseUri("/?" + Array(
-      "page=" + linkPage,
-      "showIncomplete=" + showIncomplete
-      ).mkString("&"))
+  private def makePageLink(showIncomplete: Boolean): String = {
+    UIUtils.prependBaseUri("/?" + "showIncomplete=" + showIncomplete)
   }
 }
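
With this change, render emits only a placeholder div plus script tags, so the listing itself is built in the browser. The sketch below illustrates that client-side flow; historypage.js is not included in this mail, so the endpoint handling, template id, and table options shown are assumptions, not the actual implementation.

// Hypothetical client-side rendering combining the REST application list,
// the Mustache plugin, and DataTables; the template id and options are assumptions.
$(document).ready(function () {
  $.getJSON("api/v1/applications", function (apps) {
    var html = $.mustache($("#history-summary-template").html(), { applications: apps });
    $("#history-summary").html(html);
    // DataTables supplies the sorting and paging that the removed Scala code
    // used to generate on the server.
    $("#history-summary table").DataTable({ paging: true, pageLength: 20 });
  });
});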

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
index 0fc0fb5..0f30183 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
@@ -71,6 +71,13 @@ private[spark] object ApplicationsListResource {
           attemptId = internalAttemptInfo.attemptId,
           startTime = new Date(internalAttemptInfo.startTime),
           endTime = new Date(internalAttemptInfo.endTime),
+          duration =
+            if (internalAttemptInfo.endTime > 0) {
+              internalAttemptInfo.endTime - internalAttemptInfo.startTime
+            } else {
+              0
+            },
+          lastUpdated = new Date(internalAttemptInfo.lastUpdated),
           sparkUser = internalAttemptInfo.sparkUser,
           completed = internalAttemptInfo.completed
         )
@@ -93,6 +100,13 @@ private[spark] object ApplicationsListResource {
         attemptId = None,
         startTime = new Date(internal.startTime),
         endTime = new Date(internal.endTime),
+        duration =
+          if (internal.endTime > 0) {
+            internal.endTime - internal.startTime
+          } else {
+            0
+          },
+        lastUpdated = new Date(internal.endTime),
         sparkUser = internal.desc.user,
         completed = completed
       ))
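
With duration and lastUpdated now populated, REST clients can read both fields for every attempt. A hedged consumer-side sketch follows; the field names come from this diff, while the endpoint path assumes the standard /api/v1/applications listing and the logging is purely illustrative.

// Hypothetical client of the extended attempt info.
$.getJSON("api/v1/applications", function (apps) {
  apps.forEach(function (app) {
    app.attempts.forEach(function (attempt) {
      // duration is reported in milliseconds; lastUpdated reflects when the
      // attempt's information was last refreshed.
      console.log(app.id, attempt.duration + " ms", attempt.lastUpdated);
    });
  });
});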

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/api.scala b/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
index 3adf5b1..2b0079f 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
@@ -35,6 +35,8 @@ class ApplicationAttemptInfo private[spark](
     val attemptId: Option[String],
     val startTime: Date,
     val endTime: Date,
+    val lastUpdated: Date,
+    val duration: Long,
     val sparkUser: String,
     val completed: Boolean = false)
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index cf45414..6cc30ee 100644
--- a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -114,6 +114,8 @@ private[spark] class SparkUI private (
         attemptId = None,
         startTime = new Date(startTime),
         endTime = new Date(-1),
+        duration = 0,
+        lastUpdated = new Date(startTime),
         sparkUser = "",
         completed = false
       ))

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
index 1949c4b..4ebee90 100644
--- a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
@@ -157,11 +157,22 @@ private[spark] object UIUtils extends Logging {
   def commonHeaderNodes: Seq[Node] = {
     <meta http-equiv="Content-type" content="text/html; charset=utf-8" />
     <link rel="stylesheet" href={prependBaseUri("/static/bootstrap.min.css")} 
type="text/css"/>
+    <link rel="stylesheet"
+          href={prependBaseUri("/static/jquery.dataTables.1.10.4.min.css")} 
type="text/css"/>
+    <link rel="stylesheet"
+          href={prependBaseUri("/static/dataTables.bootstrap.css")} 
type="text/css"/>
+    <link rel="stylesheet" 
href={prependBaseUri("/static/jsonFormatter.min.css")} type="text/css"/>
     <link rel="stylesheet" href={prependBaseUri("/static/vis.min.css")} 
type="text/css"/>
     <link rel="stylesheet" href={prependBaseUri("/static/webui.css")} 
type="text/css"/>
     <link rel="stylesheet" href={prependBaseUri("/static/timeline-view.css")} 
type="text/css"/>
     <script src={prependBaseUri("/static/sorttable.js")} ></script>
     <script src={prependBaseUri("/static/jquery-1.11.1.min.js")}></script>
+    <script 
src={prependBaseUri("/static/jquery.dataTables.1.10.4.min.js")}></script>
+    <script 
src={prependBaseUri("/static/jquery.cookies.2.2.0.min.js")}></script>
+    <script src={prependBaseUri("/static/jquery.blockUI.min.js")}></script>
+    <script 
src={prependBaseUri("/static/dataTables.bootstrap.min.js")}></script>
+    <script src={prependBaseUri("/static/jsonFormatter.min.js")}></script>
+    <script src={prependBaseUri("/static/jquery.mustache.js")}></script>
     <script src={prependBaseUri("/static/vis.min.js")}></script>
     <script src={prependBaseUri("/static/bootstrap-tooltip.js")}></script>
     <script src={prependBaseUri("/static/initialize-tooltips.js")}></script>

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/test/resources/HistoryServerExpectations/application_list_json_expectation.json
----------------------------------------------------------------------
diff --git a/core/src/test/resources/HistoryServerExpectations/application_list_json_expectation.json b/core/src/test/resources/HistoryServerExpectations/application_list_json_expectation.json
index d575bf2..5bbb4ce 100644
--- a/core/src/test/resources/HistoryServerExpectations/application_list_json_expectation.json
+++ b/core/src/test/resources/HistoryServerExpectations/application_list_json_expectation.json
@@ -4,6 +4,8 @@
   "attempts" : [ {
     "startTime" : "2015-05-06T13:03:00.893GMT",
     "endTime" : "2015-05-06T13:03:11.398GMT",
+    "lastUpdated" : "",
+    "duration" : 10505,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -14,12 +16,16 @@
     "attemptId" : "2",
     "startTime" : "2015-05-06T13:03:00.893GMT",
     "endTime" : "2015-05-06T13:03:00.950GMT",
+    "lastUpdated" : "",
+    "duration" : 57,
     "sparkUser" : "irashid",
     "completed" : true
   }, {
     "attemptId" : "1",
     "startTime" : "2015-05-06T13:03:00.880GMT",
     "endTime" : "2015-05-06T13:03:00.890GMT",
+    "lastUpdated" : "",
+    "duration" : 10,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -30,12 +36,16 @@
     "attemptId" : "2",
     "startTime" : "2015-03-17T23:11:50.242GMT",
     "endTime" : "2015-03-17T23:12:25.177GMT",
+    "lastUpdated" : "",
+    "duration" : 34935,
     "sparkUser" : "irashid",
     "completed" : true
   }, {
     "attemptId" : "1",
     "startTime" : "2015-03-16T19:25:10.242GMT",
     "endTime" : "2015-03-16T19:25:45.177GMT",
+    "lastUpdated" : "",
+    "duration" : 34935,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -45,6 +55,8 @@
   "attempts" : [ {
     "startTime" : "2015-02-28T00:02:38.277GMT",
     "endTime" : "2015-02-28T00:02:46.912GMT",
+    "lastUpdated" : "",
+    "duration" : 8635,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -54,6 +66,8 @@
   "attempts" : [ {
     "startTime" : "2015-02-03T16:42:59.720GMT",
     "endTime" : "2015-02-03T16:43:08.731GMT",
+    "lastUpdated" : "",
+    "duration" : 9011,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -63,7 +77,9 @@
   "attempts" : [ {
     "startTime" : "2015-02-03T16:42:38.277GMT",
     "endTime" : "2015-02-03T16:42:46.912GMT",
+    "lastUpdated" : "",
+    "duration" : 8635,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
-} ]
\ No newline at end of file
+} ]

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/test/resources/HistoryServerExpectations/completed_app_list_json_expectation.json
----------------------------------------------------------------------
diff --git a/core/src/test/resources/HistoryServerExpectations/completed_app_list_json_expectation.json b/core/src/test/resources/HistoryServerExpectations/completed_app_list_json_expectation.json
index d575bf2..5bbb4ce 100644
--- a/core/src/test/resources/HistoryServerExpectations/completed_app_list_json_expectation.json
+++ b/core/src/test/resources/HistoryServerExpectations/completed_app_list_json_expectation.json
@@ -4,6 +4,8 @@
   "attempts" : [ {
     "startTime" : "2015-05-06T13:03:00.893GMT",
     "endTime" : "2015-05-06T13:03:11.398GMT",
+    "lastUpdated" : "",
+    "duration" : 10505,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -14,12 +16,16 @@
     "attemptId" : "2",
     "startTime" : "2015-05-06T13:03:00.893GMT",
     "endTime" : "2015-05-06T13:03:00.950GMT",
+    "lastUpdated" : "",
+    "duration" : 57,
     "sparkUser" : "irashid",
     "completed" : true
   }, {
     "attemptId" : "1",
     "startTime" : "2015-05-06T13:03:00.880GMT",
     "endTime" : "2015-05-06T13:03:00.890GMT",
+    "lastUpdated" : "",
+    "duration" : 10,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -30,12 +36,16 @@
     "attemptId" : "2",
     "startTime" : "2015-03-17T23:11:50.242GMT",
     "endTime" : "2015-03-17T23:12:25.177GMT",
+    "lastUpdated" : "",
+    "duration" : 34935,
     "sparkUser" : "irashid",
     "completed" : true
   }, {
     "attemptId" : "1",
     "startTime" : "2015-03-16T19:25:10.242GMT",
     "endTime" : "2015-03-16T19:25:45.177GMT",
+    "lastUpdated" : "",
+    "duration" : 34935,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -45,6 +55,8 @@
   "attempts" : [ {
     "startTime" : "2015-02-28T00:02:38.277GMT",
     "endTime" : "2015-02-28T00:02:46.912GMT",
+    "lastUpdated" : "",
+    "duration" : 8635,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -54,6 +66,8 @@
   "attempts" : [ {
     "startTime" : "2015-02-03T16:42:59.720GMT",
     "endTime" : "2015-02-03T16:43:08.731GMT",
+    "lastUpdated" : "",
+    "duration" : 9011,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -63,7 +77,9 @@
   "attempts" : [ {
     "startTime" : "2015-02-03T16:42:38.277GMT",
     "endTime" : "2015-02-03T16:42:46.912GMT",
+    "lastUpdated" : "",
+    "duration" : 8635,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
-} ]
\ No newline at end of file
+} ]

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/test/resources/HistoryServerExpectations/maxDate2_app_list_json_expectation.json
----------------------------------------------------------------------
diff --git a/core/src/test/resources/HistoryServerExpectations/maxDate2_app_list_json_expectation.json b/core/src/test/resources/HistoryServerExpectations/maxDate2_app_list_json_expectation.json
index 483632a..3f80a52 100644
--- a/core/src/test/resources/HistoryServerExpectations/maxDate2_app_list_json_expectation.json
+++ b/core/src/test/resources/HistoryServerExpectations/maxDate2_app_list_json_expectation.json
@@ -4,7 +4,9 @@
   "attempts" : [ {
     "startTime" : "2015-02-03T16:42:38.277GMT",
     "endTime" : "2015-02-03T16:42:46.912GMT",
+    "lastUpdated" : "",
+    "duration" : 8635,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
-} ]
\ No newline at end of file
+} ]

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/test/resources/HistoryServerExpectations/maxDate_app_list_json_expectation.json
----------------------------------------------------------------------
diff --git a/core/src/test/resources/HistoryServerExpectations/maxDate_app_list_json_expectation.json b/core/src/test/resources/HistoryServerExpectations/maxDate_app_list_json_expectation.json
index 4b85690..508bdc1 100644
--- a/core/src/test/resources/HistoryServerExpectations/maxDate_app_list_json_expectation.json
+++ b/core/src/test/resources/HistoryServerExpectations/maxDate_app_list_json_expectation.json
@@ -4,6 +4,8 @@
   "attempts" : [ {
     "startTime" : "2015-02-03T16:42:59.720GMT",
     "endTime" : "2015-02-03T16:43:08.731GMT",
+    "lastUpdated" : "",
+    "duration" : 9011,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -13,7 +15,9 @@
   "attempts" : [ {
     "startTime" : "2015-02-03T16:42:38.277GMT",
     "endTime" : "2015-02-03T16:42:46.912GMT",
+    "lastUpdated" : "",
+    "duration" : 8635,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
-} ]
\ No newline at end of file
+} ]

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/test/resources/HistoryServerExpectations/minDate_app_list_json_expectation.json
----------------------------------------------------------------------
diff --git a/core/src/test/resources/HistoryServerExpectations/minDate_app_list_json_expectation.json b/core/src/test/resources/HistoryServerExpectations/minDate_app_list_json_expectation.json
index 15c2de8..5dca7d7 100644
--- a/core/src/test/resources/HistoryServerExpectations/minDate_app_list_json_expectation.json
+++ b/core/src/test/resources/HistoryServerExpectations/minDate_app_list_json_expectation.json
@@ -4,6 +4,8 @@
   "attempts" : [ {
     "startTime" : "2015-05-06T13:03:00.893GMT",
     "endTime" : "2015-05-06T13:03:11.398GMT",
+    "lastUpdated" : "",
+    "duration" : 10505,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -14,12 +16,16 @@
     "attemptId" : "2",
     "startTime" : "2015-05-06T13:03:00.893GMT",
     "endTime" : "2015-05-06T13:03:00.950GMT",
+    "lastUpdated" : "",
+    "duration" : 57,
     "sparkUser" : "irashid",
     "completed" : true
   }, {
     "attemptId" : "1",
     "startTime" : "2015-05-06T13:03:00.880GMT",
     "endTime" : "2015-05-06T13:03:00.890GMT",
+    "lastUpdated" : "",
+    "duration" : 10,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -30,12 +36,16 @@
     "attemptId" : "2",
     "startTime" : "2015-03-17T23:11:50.242GMT",
     "endTime" : "2015-03-17T23:12:25.177GMT",
+    "lastUpdated" : "",
+    "duration" : 34935,
     "sparkUser" : "irashid",
     "completed" : true
   }, {
     "attemptId" : "1",
     "startTime" : "2015-03-16T19:25:10.242GMT",
     "endTime" : "2015-03-16T19:25:45.177GMT",
+    "lastUpdated" : "",
+    "duration" : 34935,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
@@ -46,8 +56,10 @@
       {
         "startTime": "2015-02-28T00:02:38.277GMT",
         "endTime": "2015-02-28T00:02:46.912GMT",
+        "lastUpdated" : "",
+        "duration" : 8635,
         "sparkUser": "irashid",
         "completed": true
       }
     ]
-} ]
\ No newline at end of file
+} ]

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/test/resources/HistoryServerExpectations/one_app_json_expectation.json
----------------------------------------------------------------------
diff --git a/core/src/test/resources/HistoryServerExpectations/one_app_json_expectation.json b/core/src/test/resources/HistoryServerExpectations/one_app_json_expectation.json
index 07489ad..cca32c7 100644
--- a/core/src/test/resources/HistoryServerExpectations/one_app_json_expectation.json
+++ b/core/src/test/resources/HistoryServerExpectations/one_app_json_expectation.json
@@ -4,7 +4,9 @@
   "attempts" : [ {
     "startTime" : "2015-02-03T16:42:59.720GMT",
     "endTime" : "2015-02-03T16:43:08.731GMT",
+    "lastUpdated" : "",
+    "duration" : 9011,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/test/resources/HistoryServerExpectations/one_app_multi_attempt_json_expectation.json
----------------------------------------------------------------------
diff --git a/core/src/test/resources/HistoryServerExpectations/one_app_multi_attempt_json_expectation.json b/core/src/test/resources/HistoryServerExpectations/one_app_multi_attempt_json_expectation.json
index 8f3d716..1ea1779 100644
--- a/core/src/test/resources/HistoryServerExpectations/one_app_multi_attempt_json_expectation.json
+++ b/core/src/test/resources/HistoryServerExpectations/one_app_multi_attempt_json_expectation.json
@@ -5,13 +5,17 @@
     "attemptId" : "2",
     "startTime" : "2015-03-17T23:11:50.242GMT",
     "endTime" : "2015-03-17T23:12:25.177GMT",
+    "lastUpdated" : "",
+    "duration" : 34935,
     "sparkUser" : "irashid",
     "completed" : true
   }, {
     "attemptId" : "1",
     "startTime" : "2015-03-16T19:25:10.242GMT",
     "endTime" : "2015-03-16T19:25:45.177GMT",
+    "lastUpdated" : "",
+    "duration" : 34935,
     "sparkUser" : "irashid",
     "completed" : true
   } ]
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
index 18659fc..be55b2e 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
@@ -139,7 +139,24 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
       code should be (HttpServletResponse.SC_OK)
       jsonOpt should be ('defined)
       errOpt should be (None)
-      val json = jsonOpt.get
+      val jsonOrg = jsonOpt.get
+
+      // SPARK-10873 added the lastUpdated field for each application's attempt,
+      // the REST API returns the last modified time of EVENT LOG file for this field.
+      // It is not applicable to hard-code this dynamic field in a static expected file,
+      // so here we skip checking the lastUpdated field's value (setting it as "").
+      val json = if (jsonOrg.indexOf("lastUpdated") >= 0) {
+        val subStrings = jsonOrg.split(",")
+        for (i <- subStrings.indices) {
+          if (subStrings(i).indexOf("lastUpdated") >= 0) {
+            subStrings(i) = "\"lastUpdated\":\"\""
+          }
+        }
+        subStrings.mkString(",")
+      } else {
+        jsonOrg
+      }
+
       val exp = IOUtils.toString(new FileInputStream(
         new File(expRoot, HistoryServerSuite.sanitizePath(name) + "_expectation.json")))
       // compare the ASTs so formatting differences don't cause failures
@@ -241,30 +258,6 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
     getContentAndCode("foobar")._1 should be (HttpServletResponse.SC_NOT_FOUND)
   }
 
-  test("generate history page with relative links") {
-    val historyServer = mock[HistoryServer]
-    val request = mock[HttpServletRequest]
-    val ui = mock[SparkUI]
-    val link = "/history/app1"
-    val info = new ApplicationHistoryInfo("app1", "app1",
-      List(ApplicationAttemptInfo(None, 0, 2, 1, "xxx", true)))
-    when(historyServer.getApplicationList()).thenReturn(Seq(info))
-    when(ui.basePath).thenReturn(link)
-    when(historyServer.getProviderConfig()).thenReturn(Map[String, String]())
-    val page = new HistoryPage(historyServer)
-
-    // when
-    val response = page.render(request)
-
-    // then
-    val links = response \\ "a"
-    val justHrefs = for {
-      l <- links
-      attrs <- l.attribute("href")
-    } yield (attrs.toString)
-    justHrefs should contain (UIUtils.prependBaseUri(resource = link))
-  }
-
   test("relative links are prefixed with uiRoot (spark.ui.proxyBase)") {
     val proxyBaseBeforeTest = System.getProperty("spark.ui.proxyBase")
     val uiRoot = Option(System.getenv("APPLICATION_WEB_PROXY_BASE")).getOrElse("/testwebproxybase")

http://git-wip-us.apache.org/repos/asf/spark/blob/e4c1162b/project/MimaExcludes.scala
----------------------------------------------------------------------
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index 968a290..a3ae4d2 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -45,6 +45,10 @@ object MimaExcludes {
         excludePackage("org.apache.spark.sql.execution"),
         
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.mllib.feature.PCAModel.this"),
         
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.status.api.v1.StageData.this"),
+        ProblemFilters.exclude[MissingMethodProblem](
+          "org.apache.spark.status.api.v1.ApplicationAttemptInfo.this"),
+        ProblemFilters.exclude[MissingMethodProblem](
+          "org.apache.spark.status.api.v1.ApplicationAttemptInfo.<init>$default$5"),
         // SPARK-12600 Remove SQL deprecated methods
         ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SQLContext$QueryExecution"),
         ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SQLContext$SparkPlanner"),

