Revision: 24100
Author: [email protected]
Date: Fri Sep 19 18:04:45 2014 UTC
Log: Version 3.29.81 (based on bleeding_edge revision r24099)
Remove a couple of deprecated APIs that moved to Isolate.
ARM: Make stack limit stricter to account for large buffers in
MacroAssembler (Chromium issue 405338).
Reland 24052 - Require V8 to be explicitly initialized before an Isolate is
created.
Require V8 to be explicitly initialized before an Isolate is created.
Performance and stability improvements on all platforms.
https://code.google.com/p/v8/source/detail?r=24100
Added:
/trunk/src/compiler/js-builtin-reducer.cc
/trunk/src/compiler/js-builtin-reducer.h
/trunk/test/mjsunit/harmony/regexp-sticky.js
/trunk/test/mjsunit/harmony/super.js
/trunk/test/mjsunit/regexp-not-sticky-yet.js
/trunk/test/mjsunit/runtime-gen/loadfromsuper.js
Modified:
/trunk/BUILD.gn
/trunk/ChangeLog
/trunk/include/v8.h
/trunk/samples/lineprocessor.cc
/trunk/samples/process.cc
/trunk/samples/shell.cc
/trunk/src/api.cc
/trunk/src/arm/full-codegen-arm.cc
/trunk/src/arm/lithium-codegen-arm.cc
/trunk/src/arm/lithium-codegen-arm.h
/trunk/src/arm64/full-codegen-arm64.cc
/trunk/src/arm64/lithium-codegen-arm64.cc
/trunk/src/arm64/lithium-codegen-arm64.h
/trunk/src/base/utils/random-number-generator.cc
/trunk/src/base/utils/random-number-generator.h
/trunk/src/bootstrapper.cc
/trunk/src/compilation-cache.cc
/trunk/src/compiler/ia32/code-generator-ia32.cc
/trunk/src/compiler/ia32/instruction-selector-ia32.cc
/trunk/src/compiler/js-typed-lowering.cc
/trunk/src/compiler/js-typed-lowering.h
/trunk/src/compiler/pipeline.cc
/trunk/src/compiler/representation-change.h
/trunk/src/compiler/simplified-lowering.cc
/trunk/src/compiler/x64/code-generator-x64.cc
/trunk/src/compiler/x64/instruction-selector-x64.cc
/trunk/src/compiler.cc
/trunk/src/compiler.h
/trunk/src/counters.h
/trunk/src/d8.cc
/trunk/src/deoptimizer.h
/trunk/src/factory.cc
/trunk/src/flag-definitions.h
/trunk/src/full-codegen.cc
/trunk/src/full-codegen.h
/trunk/src/globals.h
/trunk/src/heap/gc-idle-time-handler.cc
/trunk/src/heap/heap-inl.h
/trunk/src/heap/heap.h
/trunk/src/hydrogen.cc
/trunk/src/hydrogen.h
/trunk/src/ia32/assembler-ia32.cc
/trunk/src/ia32/assembler-ia32.h
/trunk/src/ia32/full-codegen-ia32.cc
/trunk/src/ia32/lithium-codegen-ia32.cc
/trunk/src/ia32/lithium-codegen-ia32.h
/trunk/src/ic/ic.cc
/trunk/src/ic/x87/ic-x87.cc
/trunk/src/isolate.cc
/trunk/src/isolate.h
/trunk/src/jsregexp.cc
/trunk/src/jsregexp.h
/trunk/src/lithium-codegen.cc
/trunk/src/lithium-codegen.h
/trunk/src/messages.js
/trunk/src/mips/full-codegen-mips.cc
/trunk/src/mips/lithium-codegen-mips.cc
/trunk/src/mips/lithium-codegen-mips.h
/trunk/src/mips64/full-codegen-mips64.cc
/trunk/src/mips64/lithium-codegen-mips64.cc
/trunk/src/mips64/lithium-codegen-mips64.h
/trunk/src/mksnapshot.cc
/trunk/src/msan.h
/trunk/src/objects-inl.h
/trunk/src/objects.cc
/trunk/src/objects.h
/trunk/src/parser.cc
/trunk/src/regexp.js
/trunk/src/runtime.cc
/trunk/src/runtime.h
/trunk/src/scopeinfo.cc
/trunk/src/scopes.cc
/trunk/src/scopes.h
/trunk/src/snapshot-common.cc
/trunk/src/snapshot-external.cc
/trunk/src/snapshot.h
/trunk/src/v8.cc
/trunk/src/v8.h
/trunk/src/version.cc
/trunk/src/x64/assembler-x64.cc
/trunk/src/x64/assembler-x64.h
/trunk/src/x64/full-codegen-x64.cc
/trunk/src/x64/lithium-codegen-x64.cc
/trunk/src/x64/lithium-codegen-x64.h
/trunk/src/x87/code-stubs-x87.cc
/trunk/src/x87/full-codegen-x87.cc
/trunk/src/x87/lithium-codegen-x87.cc
/trunk/src/x87/lithium-codegen-x87.h
/trunk/src/x87/macro-assembler-x87.cc
/trunk/test/cctest/cctest.cc
/trunk/test/cctest/cctest.h
/trunk/test/cctest/compiler/test-js-typed-lowering.cc
/trunk/test/cctest/compiler/test-representation-change.cc
/trunk/test/cctest/compiler/test-run-machops.cc
/trunk/test/cctest/compiler/test-run-properties.cc
/trunk/test/cctest/compiler/test-simplified-lowering.cc
/trunk/test/cctest/test-api.cc
/trunk/test/cctest/test-ast.cc
/trunk/test/cctest/test-dataflow.cc
/trunk/test/cctest/test-debug.cc
/trunk/test/cctest/test-deoptimization.cc
/trunk/test/cctest/test-disasm-ia32.cc
/trunk/test/cctest/test-disasm-x64.cc
/trunk/test/cctest/test-heap.cc
/trunk/test/cctest/test-liveedit.cc
/trunk/test/cctest/test-log.cc
/trunk/test/cctest/test-macro-assembler-arm.cc
/trunk/test/cctest/test-macro-assembler-ia32.cc
/trunk/test/cctest/test-macro-assembler-x64.cc
/trunk/test/cctest/test-macro-assembler-x87.cc
/trunk/test/cctest/test-random-number-generator.cc
/trunk/test/cctest/test-regexp.cc
/trunk/test/cctest/test-serialize.cc
/trunk/test/cctest/test-spaces.cc
/trunk/tools/gyp/v8.gyp
/trunk/tools/lexer-shell.cc
/trunk/tools/parser-shell.cc
/trunk/tools/push-to-trunk/auto_push.py
/trunk/tools/push-to-trunk/auto_tag.py
/trunk/tools/push-to-trunk/chromium_roll.py
/trunk/tools/push-to-trunk/common_includes.py
/trunk/tools/push-to-trunk/merge_to_branch.py
/trunk/tools/push-to-trunk/push_to_trunk.py
/trunk/tools/push-to-trunk/releases.py
/trunk/tools/push-to-trunk/test_scripts.py
=======================================
--- /dev/null
+++ /trunk/src/compiler/js-builtin-reducer.cc Fri Sep 19 18:04:45 2014 UTC
@@ -0,0 +1,114 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/compiler/graph-inl.h"
+#include "src/compiler/js-builtin-reducer.h"
+#include "src/compiler/node-matchers.h"
+#include "src/compiler/node-properties-inl.h"
+#include "src/types.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+
+// Helper method that assumes replacement nodes are pure values that don't
+// produce an effect. Replaces {node} with {reduction} and relaxes effects.
+static Reduction ReplaceWithPureReduction(Node* node, Reduction reduction) {
+ if (reduction.Changed()) {
+ NodeProperties::ReplaceWithValue(node, reduction.replacement());
+ return reduction;
+ }
+ return Reducer::NoChange();
+}
+
+
+// Helper class to access JSCallFunction nodes that are potential candidates
+// for reduction when they have a BuiltinFunctionId associated with them.
+class JSCallReduction {
+ public:
+ explicit JSCallReduction(Node* node) : node_(node) {}
+
+  // Determines whether the node is a JSCallFunction operation that targets a
+ // constant callee being a well-known builtin with a BuiltinFunctionId.
+ bool HasBuiltinFunctionId() {
+ if (node_->opcode() != IrOpcode::kJSCallFunction) return false;
+ HeapObjectMatcher<Object> m(NodeProperties::GetValueInput(node_, 0));
+ if (!m.HasValue() || !m.Value().handle()->IsJSFunction()) return false;
+    Handle<JSFunction> function = Handle<JSFunction>::cast(m.Value().handle());
+ return function->shared()->HasBuiltinFunctionId();
+ }
+
+ // Retrieves the BuiltinFunctionId as described above.
+ BuiltinFunctionId GetBuiltinFunctionId() {
+ DCHECK_EQ(IrOpcode::kJSCallFunction, node_->opcode());
+ HeapObjectMatcher<Object> m(NodeProperties::GetValueInput(node_, 0));
+    Handle<JSFunction> function = Handle<JSFunction>::cast(m.Value().handle());
+ return function->shared()->builtin_function_id();
+ }
+
+ // Determines whether the call takes one input of the given type.
+ bool InputsMatch(Type* t1) {
+ return GetJSCallArity() == 1 &&
+ NodeProperties::GetBounds(GetJSCallInput(0)).upper->Is(t1);
+ }
+
+ // Determines whether the call takes two inputs of the given types.
+ bool InputsMatch(Type* t1, Type* t2) {
+ return GetJSCallArity() == 2 &&
+ NodeProperties::GetBounds(GetJSCallInput(0)).upper->Is(t1) &&
+ NodeProperties::GetBounds(GetJSCallInput(1)).upper->Is(t2);
+ }
+
+ Node* left() { return GetJSCallInput(0); }
+ Node* right() { return GetJSCallInput(1); }
+
+ protected:
+ int GetJSCallArity() {
+ DCHECK_EQ(IrOpcode::kJSCallFunction, node_->opcode());
+ // Skip first (i.e. callee) and second (i.e. receiver) operand.
+ return OperatorProperties::GetValueInputCount(node_->op()) - 2;
+ }
+
+ Node* GetJSCallInput(int index) {
+ DCHECK_EQ(IrOpcode::kJSCallFunction, node_->opcode());
+ DCHECK_LT(index, GetJSCallArity());
+ // Skip first (i.e. callee) and second (i.e. receiver) operand.
+ return NodeProperties::GetValueInput(node_, index + 2);
+ }
+
+ private:
+ Node* node_;
+};
+
+
+// ES6 draft 08-24-14, section 20.2.2.19.
+Reduction JSBuiltinReducer::ReduceMathImul(Node* node) {
+ JSCallReduction r(node);
+ if (r.InputsMatch(Type::Integral32(), Type::Integral32())) {
+ // Math.imul(a:int32, b:int32) -> Int32Mul(a, b)
+    Node* value = graph()->NewNode(machine()->Int32Mul(), r.left(), r.right());
+ return Replace(value);
+ }
+ return NoChange();
+}
+
+
+Reduction JSBuiltinReducer::Reduce(Node* node) {
+ JSCallReduction r(node);
+
+ // Dispatch according to the BuiltinFunctionId if present.
+ if (!r.HasBuiltinFunctionId()) return NoChange();
+ switch (r.GetBuiltinFunctionId()) {
+ case kMathImul:
+ return ReplaceWithPureReduction(node, ReduceMathImul(node));
+ default:
+ break;
+ }
+ return NoChange();
+}
+
+} // namespace compiler
+} // namespace internal
+} // namespace v8
=======================================
--- /dev/null
+++ /trunk/src/compiler/js-builtin-reducer.h Fri Sep 19 18:04:45 2014 UTC
@@ -0,0 +1,42 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_COMPILER_JS_BUILTIN_REDUCER_H_
+#define V8_COMPILER_JS_BUILTIN_REDUCER_H_
+
+#include "src/compiler/graph-reducer.h"
+#include "src/compiler/js-graph.h"
+#include "src/compiler/machine-operator.h"
+#include "src/compiler/node.h"
+#include "src/compiler/simplified-operator.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+class JSBuiltinReducer FINAL : public Reducer {
+ public:
+ explicit JSBuiltinReducer(JSGraph* jsgraph)
+ : jsgraph_(jsgraph), simplified_(jsgraph->zone()) {}
+ virtual ~JSBuiltinReducer() {}
+
+ virtual Reduction Reduce(Node* node) OVERRIDE;
+
+ private:
+ Graph* graph() { return jsgraph_->graph(); }
+ CommonOperatorBuilder* common() { return jsgraph_->common(); }
+ MachineOperatorBuilder* machine() { return jsgraph_->machine(); }
+ SimplifiedOperatorBuilder* simplified() { return &simplified_; }
+
+ Reduction ReduceMathImul(Node* node);
+
+ JSGraph* jsgraph_;
+ SimplifiedOperatorBuilder simplified_;
+};
+
+} // namespace compiler
+} // namespace internal
+} // namespace v8
+
+#endif // V8_COMPILER_JS_BUILTIN_REDUCER_H_
=======================================
--- /dev/null
+++ /trunk/test/mjsunit/harmony/regexp-sticky.js Fri Sep 19 18:04:45 2014 UTC
@@ -0,0 +1,132 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-regexps
+
+var re = /foo.bar/;
+
+assertTrue(!!"foo*bar".match(re));
+assertTrue(!!"..foo*bar".match(re));
+
+var plain = /foobar/;
+
+assertTrue(!!"foobar".match(plain));
+assertTrue(!!"..foobar".match(plain));
+
+var sticky = /foo.bar/y;
+
+assertTrue(!!"foo*bar".match(sticky));
+assertEquals(0, sticky.lastIndex);
+assertFalse(!!"..foo*bar".match(sticky));
+
+var stickyplain = /foobar/y;
+
+assertTrue(!!"foobar".match(stickyplain));
+assertEquals(0, stickyplain.lastIndex);
+assertFalse(!!"..foobar".match(stickyplain));
+
+var global = /foo.bar/g;
+
+assertTrue(global.test("foo*bar"));
+assertFalse(global.test("..foo*bar"));
+global.lastIndex = 0;
+assertTrue(global.test("..foo*bar"));
+
+var plainglobal = /foobar/g;
+
+assertTrue(plainglobal.test("foobar"));
+assertFalse(plainglobal.test("foobar"));
+plainglobal.lastIndex = 0;
+assertTrue(plainglobal.test("foobar"));
+
+var stickyglobal = /foo.bar/gy;
+
+assertTrue(stickyglobal.test("foo*bar"));
+assertEquals(7, stickyglobal.lastIndex);
+assertFalse(stickyglobal.test("..foo*bar"));
+stickyglobal.lastIndex = 0;
+assertFalse(stickyglobal.test("..foo*bar"));
+stickyglobal.lastIndex = 2;
+assertTrue(stickyglobal.test("..foo*bar"));
+assertEquals(9, stickyglobal.lastIndex);
+
+var stickyplainglobal = /foobar/yg;
+assertTrue(stickyplainglobal.sticky);
+stickyplainglobal.sticky = false;
+
+assertTrue(stickyplainglobal.test("foobar"));
+assertEquals(6, stickyplainglobal.lastIndex);
+assertFalse(stickyplainglobal.test("..foobar"));
+stickyplainglobal.lastIndex = 0;
+assertFalse(stickyplainglobal.test("..foobar"));
+stickyplainglobal.lastIndex = 2;
+assertTrue(stickyplainglobal.test("..foobar"));
+assertEquals(8, stickyplainglobal.lastIndex);
+
+assertEquals("/foo.bar/gy", "" + stickyglobal);
+assertEquals("/foo.bar/g", "" + global);
+
+assertTrue(stickyglobal.sticky);
+stickyglobal.sticky = false;
+assertTrue(stickyglobal.sticky);
+
+var stickyglobal2 = new RegExp("foo.bar", "gy");
+assertTrue(stickyglobal2.test("foo*bar"));
+assertEquals(7, stickyglobal2.lastIndex);
+assertFalse(stickyglobal2.test("..foo*bar"));
+stickyglobal2.lastIndex = 0;
+assertFalse(stickyglobal2.test("..foo*bar"));
+stickyglobal2.lastIndex = 2;
+assertTrue(stickyglobal2.test("..foo*bar"));
+assertEquals(9, stickyglobal2.lastIndex);
+
+assertEquals("/foo.bar/gy", "" + stickyglobal2);
+
+assertTrue(stickyglobal2.sticky);
+stickyglobal2.sticky = false;
+assertTrue(stickyglobal2.sticky);
+
+sticky.lastIndex = -1; // Causes sticky regexp to fail fast
+assertFalse(sticky.test("..foo.bar"));
+assertEquals(0, sticky.lastIndex);
+
+sticky.lastIndex = -1; // Causes sticky regexp to fail fast
+assertFalse(!!sticky.exec("..foo.bar"));
+assertEquals(0, sticky.lastIndex);
+
+// ES6 draft says: Even when the y flag is used with a pattern, ^ always
+// matches only at the beginning of Input, or (if Multiline is true) at the
+// beginning of a line.
+var hat = /^foo/y;
+hat.lastIndex = 2;
+assertFalse(hat.test("..foo"));
+
+var mhat = /^foo/my;
+mhat.lastIndex = 2;
+assertFalse(mhat.test("..foo"));
+mhat.lastIndex = 2;
+assertTrue(mhat.test(".\nfoo"));
=======================================
--- /dev/null
+++ /trunk/test/mjsunit/harmony/super.js Fri Sep 19 18:04:45 2014 UTC
@@ -0,0 +1,127 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Flags: --harmony-classes
+
+
+(function TestSuperNamedLoads() {
+ function Base() { }
+ function Derived() {
+ this.derivedDataProperty = "xxx";
+ }
+ Derived.prototype = Object.create(Base.prototype);
+
+ function fBase() { return "Base " + this.toString(); }
+
+ Base.prototype.f = fBase.toMethod(Base.prototype);
+
+ function fDerived() {
+ assertEquals("Base this is Derived", super.f());
+ assertEquals(15, super.x);
+ assertEquals(27, this.x);
+
+ return "Derived"
+ }
+
+ Base.prototype.x = 15;
+ Base.prototype.toString = function() { return "this is Base"; };
+ Derived.prototype.toString = function() { return "this is Derived"; };
+ Derived.prototype.x = 27;
+ Derived.prototype.f = fDerived.toMethod(Derived.prototype);
+
+ assertEquals("Base this is Base", new Base().f());
+ assertEquals("Derived", new Derived().f());
+}());
+
+(function TestSuperKeywordNonMethod() {
+ function f() {
+ super.unknown();
+ }
+
+ assertThrows(f, ReferenceError);
+}());
+
+
+(function TestGetter() {
+ function Base() {}
+ var derived;
+ Base.prototype = {
+ constructor: Base,
+ get x() {
+ assertSame(this, derived);
+ return this._x;
+ },
+ _x: 'base'
+ };
+
+ function Derived() {}
+ Derived.__proto__ = Base;
+ Derived.prototype = {
+ __proto__: Base.prototype,
+ constructor: Derived,
+ _x: 'derived'
+ };
+ Derived.prototype.testGetter = function() {
+ return super.x;
+ }.toMethod(Derived.prototype);
+ derived = new Derived();
+ assertEquals('derived', derived.testGetter());
+}());
+
+/*
+ * TODO[dslomov]: named stores and keyed loads/stores not implemented yet.
+(function TestSetter() {
+ function Base() {}
+ Base.prototype = {
+ constructor: Base,
+ get x() {
+ return this._x;
+ },
+ set x(v) {
+ this._x = v;
+ },
+ _x: 'base'
+ };
+
+ function Derived() {}
+ Derived.__proto__ = Base;
+ Derived.prototype = {
+ __proto__: Base.prototype,
+ constructor: Derived,
+ _x: 'derived'
+ };
+ Derived.prototype.testSetter = function() {
+ super.x = 'foobar';
+ }.toMethod(Derived.prototype);
+ var d = new Derived();
+ d.testSetter();
+ assertEquals('base', Base.prototype._x);
+ assertEquals('foobar', d._x);
+}());
+
+
+(function TestKeyedGetter() {
+ function Base() {}
+ Base.prototype = {
+ constructor: Base,
+ _x: 'base'
+ };
+
+ Object.defineProperty(Base.prototype, '0',
+ { get: function() { return this._x; } });
+
+ function Derived() {}
+ Derived.__proto__ = Base;
+ Derived.prototype = {
+ __proto__: Base.prototype,
+ constructor: Derived,
+ _x: 'derived'
+ };
+ Derived.prototype.testGetter = function() {
+ return super[0];
+ }.toMethod(Derived.prototype);
+ assertEquals('derived', new Derived()[0]);
+ // assertEquals('derived', new Derived().testGetter());
+}());
+*/
=======================================
--- /dev/null
+++ /trunk/test/mjsunit/regexp-not-sticky-yet.js Fri Sep 19 18:04:45 2014 UTC
@@ -0,0 +1,65 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Test that sticky regexp support is not affecting V8 when the
+// --harmony-regexps flag is not on.
+
+assertThrows(function() { eval("/foo.bar/y"); }, SyntaxError);
+assertThrows(function() { eval("/foobar/y"); }, SyntaxError);
+assertThrows(function() { eval("/foo.bar/gy"); }, SyntaxError);
+assertThrows(function() { eval("/foobar/gy"); }, SyntaxError);
+assertThrows(function() { new RegExp("foo.bar", "y"); }, SyntaxError);
+assertThrows(function() { new RegExp("foobar", "y"); }, SyntaxError);
+assertThrows(function() { new RegExp("foo.bar", "gy"); }, SyntaxError);
+assertThrows(function() { new RegExp("foobar", "gy"); }, SyntaxError);
+
+var re = /foo.bar/;
+assertEquals("/foo.bar/", "" + re);
+var plain = /foobar/;
+assertEquals("/foobar/", "" + plain);
+
+re.compile("foo.bar");
+assertEquals(void 0, re.sticky);
+
+var global = /foo.bar/g;
+assertEquals("/foo.bar/g", "" + global);
+var plainglobal = /foobar/g;
+assertEquals("/foobar/g", "" + plainglobal);
+
+assertEquals(void 0, re.sticky);
+re.sticky = true; // Has no effect on the regexp, just sets a property.
+assertTrue(re.sticky);
+
+assertTrue(re.test("..foo.bar"));
+
+re.lastIndex = -1; // Ignored for non-global, non-sticky.
+assertTrue(re.test("..foo.bar"));
+assertEquals(-1, re.lastIndex);
+
+re.lastIndex = -1; // Ignored for non-global, non-sticky.
+assertTrue(!!re.exec("..foo.bar"));
+assertEquals(-1, re.lastIndex);
=======================================
--- /dev/null
+++ /trunk/test/mjsunit/runtime-gen/loadfromsuper.js Fri Sep 19 18:04:45 2014 UTC
@@ -0,0 +1,7 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// AUTO-GENERATED BY tools/generate-runtime-tests.py, DO NOT MODIFY
+// Flags: --allow-natives-syntax --harmony --harmony-proxies
+var _home_object = new Object();
+var _receiver = new Object();
+var _name = "name";
+%LoadFromSuper(_home_object, _receiver, _name);
=======================================
--- /trunk/BUILD.gn Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/BUILD.gn Fri Sep 19 18:04:45 2014 UTC
@@ -504,6 +504,8 @@
"src/compiler/instruction-selector.h",
"src/compiler/instruction.cc",
"src/compiler/instruction.h",
+ "src/compiler/js-builtin-reducer.cc",
+ "src/compiler/js-builtin-reducer.h",
"src/compiler/js-context-specialization.cc",
"src/compiler/js-context-specialization.h",
"src/compiler/js-generic-lowering.cc",
=======================================
--- /trunk/ChangeLog Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/ChangeLog Fri Sep 19 18:04:45 2014 UTC
@@ -1,3 +1,18 @@
+2014-09-19: Version 3.29.81
+
+ Remove a couple of deprecated APIs that moved to Isolate.
+
+ ARM: Make stack limit stricter to account for large buffers in
+ MacroAssembler (Chromium issue 405338).
+
+        Reland 24052 - Require V8 to be explicitly initialized before an Isolate
+ is created.
+
+        Require V8 to be explicitly initialized before an Isolate is created.
+
+ Performance and stability improvements on all platforms.
+
+
2014-09-19: Version 3.29.78
Cleanup class parsing a bit (issue 3330).
=======================================
--- /trunk/include/v8.h Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/include/v8.h Fri Sep 19 18:04:45 2014 UTC
@@ -4099,16 +4099,6 @@
};
-/**
- * Sets the given ResourceConstraints on the given Isolate.
- *
- * Deprecated, will be removed. Pass constraints via Isolate::New or modify
- * the stack limit via Isolate::SetStackLimit.
- */
-bool V8_EXPORT SetResourceConstraints(Isolate* isolate,
- ResourceConstraints* constraints);
-
-
// --- Exceptions ---
@@ -4344,13 +4334,12 @@
/**
- * Isolate represents an isolated instance of the V8 engine. V8
- * isolates have completely separate states. Objects from one isolate
- * must not be used in other isolates. When V8 is initialized a
- * default isolate is implicitly created and entered. The embedder
- * can create additional isolates and use them in parallel in multiple
- * threads. An isolate can be entered by at most one thread at any
- * given time. The Locker/Unlocker API must be used to synchronize.
+ * Isolate represents an isolated instance of the V8 engine. V8 isolates have
+ * completely separate states. Objects from one isolate must not be used in
+ * other isolates. The embedder can create multiple isolates and use them in
+ * parallel in multiple threads. An isolate can be entered by at most one
+ * thread at any given time. The Locker/Unlocker API must be used to
+ * synchronize.
*/
class V8_EXPORT Isolate {
public:
@@ -4358,7 +4347,10 @@
* Initial configuration parameters for a new Isolate.
*/
struct CreateParams {
- CreateParams() : entry_hook(NULL), code_event_handler(NULL) {}
+ CreateParams()
+ : entry_hook(NULL),
+ code_event_handler(NULL),
+ enable_serializer(false) {}
/**
* The optional entry_hook allows the host application to provide the
@@ -4379,6 +4371,11 @@
* ResourceConstraints to use for the new Isolate.
*/
ResourceConstraints constraints;
+
+ /**
+ * This flag currently renders the Isolate unusable.
+ */
+ bool enable_serializer;
};
@@ -4489,6 +4486,8 @@
*
* When an isolate is no longer used its resources should be freed
* by calling Dispose(). Using the delete operator is not allowed.
+ *
+ * V8::Initialize() must have run prior to this.
*/
static Isolate* New(const CreateParams& params = CreateParams());
@@ -5102,9 +5101,8 @@
static void RemoveMemoryAllocationCallback(MemoryAllocationCallback
callback);
/**
- * Initializes from snapshot if possible. Otherwise, attempts to
- * initialize from scratch. This function is called implicitly if
- * you use the API without calling it first.
+ * Initializes V8. This function needs to be called before the first Isolate
+ * is created. It always returns true.
*/
static bool Initialize();
@@ -5121,50 +5119,6 @@
static void SetReturnAddressLocationResolver(
ReturnAddressLocationResolver return_address_resolver);
- /**
- * Allows the host application to provide the address of a function that's
- * invoked on entry to every V8-generated function.
- * Note that \p entry_hook is invoked at the very start of each
- * generated function.
- *
- * \param isolate the isolate to operate on.
- * \param entry_hook a function that will be invoked on entry to every
- * V8-generated function.
- * \returns true on success on supported platforms, false on failure.
- * \note Setting an entry hook can only be done very early in an isolates
- * lifetime, and once set, the entry hook cannot be revoked.
- *
- * Deprecated, will be removed. Use Isolate::New(entry_hook) instead.
- */
- static bool SetFunctionEntryHook(Isolate* isolate,
- FunctionEntryHook entry_hook);
-
- /**
- * Allows the host application to provide the address of a function that is
- * notified each time code is added, moved or removed.
- *
- * \param options options for the JIT code event handler.
- * \param event_handler the JIT code event handler, which will be invoked
- * each time code is added, moved or removed.
- * \note \p event_handler won't get notified of existent code.
- * \note since code removal notifications are not currently issued, the
- * \p event_handler may get notifications of code that overlaps earlier
- * code notifications. This happens when code areas are reused, and the
- * earlier overlapping code areas should therefore be discarded.
- * \note the events passed to \p event_handler and the strings they point to
- * are not guaranteed to live past each call. The \p event_handler must
- * copy strings and other parameters it needs to keep around.
- * \note the set of events declared in JitCodeEvent::EventType is expected to
- * grow over time, and the JitCodeEvent structure is expected to accrue
- * new members. The \p event_handler function must ignore event codes
- * it does not recognize to maintain future compatibility.
- *
- * Deprecated, will be removed. Use Isolate::SetJitCodeEventHandler or
- * Isolate::CreateParams instead.
- */
- static void SetJitCodeEventHandler(JitCodeEventOptions options,
- JitCodeEventHandler event_handler);
-
/**
* Forcefully terminate the current thread of JavaScript execution
* in the given isolate.
=======================================
--- /trunk/samples/lineprocessor.cc Mon Jul 7 00:05:07 2014 UTC
+++ /trunk/samples/lineprocessor.cc Fri Sep 19 18:04:45 2014 UTC
@@ -257,6 +257,7 @@
v8::V8::InitializeICU();
v8::Platform* platform = v8::platform::CreateDefaultPlatform();
v8::V8::InitializePlatform(platform);
+ v8::V8::Initialize();
int result = RunMain(argc, argv);
v8::V8::Dispose();
v8::V8::ShutdownPlatform();
=======================================
--- /trunk/samples/process.cc Mon Jul 7 00:05:07 2014 UTC
+++ /trunk/samples/process.cc Fri Sep 19 18:04:45 2014 UTC
@@ -648,6 +648,7 @@
v8::V8::InitializeICU();
v8::Platform* platform = v8::platform::CreateDefaultPlatform();
v8::V8::InitializePlatform(platform);
+ v8::V8::Initialize();
map<string, string> options;
string file;
ParseOptions(argc, argv, &options, &file);
=======================================
--- /trunk/samples/shell.cc Mon Jul 7 00:05:07 2014 UTC
+++ /trunk/samples/shell.cc Fri Sep 19 18:04:45 2014 UTC
@@ -83,6 +83,7 @@
v8::V8::InitializeICU();
v8::Platform* platform = v8::platform::CreateDefaultPlatform();
v8::V8::InitializePlatform(platform);
+ v8::V8::Initialize();
v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
ShellArrayBufferAllocator array_buffer_allocator;
v8::V8::SetArrayBufferAllocator(&array_buffer_allocator);
=======================================
--- /trunk/src/api.cc Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/api.cc Fri Sep 19 18:04:45 2014 UTC
@@ -200,29 +200,6 @@
}
return false;
}
-
-
-// --- S t a t i c s ---
-
-
-static bool InitializeHelper(i::Isolate* isolate) {
- // If the isolate has a function entry hook, it needs to re-build all its
- // code stubs with entry hooks embedded, so let's deserialize a snapshot.
- if (isolate == NULL || isolate->function_entry_hook() == NULL) {
- if (i::Snapshot::Initialize())
- return true;
- }
- return i::V8::Initialize(NULL);
-}
-
-
-static inline bool EnsureInitializedForIsolate(i::Isolate* isolate,
- const char* location) {
- return (isolate != NULL && isolate->IsInitialized()) ||
- Utils::ApiCheck(InitializeHelper(isolate),
- location,
- "Error initializing V8");
-}
StartupDataDecompressor::StartupDataDecompressor()
@@ -494,30 +471,23 @@
}
-bool SetResourceConstraints(Isolate* v8_isolate,
- ResourceConstraints* constraints) {
- i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
- int semi_space_size = constraints->max_semi_space_size();
- int old_space_size = constraints->max_old_space_size();
- int max_executable_size = constraints->max_executable_size();
- size_t code_range_size = constraints->code_range_size();
+void SetResourceConstraints(i::Isolate* isolate,
+ const ResourceConstraints& constraints) {
+ int semi_space_size = constraints.max_semi_space_size();
+ int old_space_size = constraints.max_old_space_size();
+ int max_executable_size = constraints.max_executable_size();
+ size_t code_range_size = constraints.code_range_size();
if (semi_space_size != 0 || old_space_size != 0 ||
max_executable_size != 0 || code_range_size != 0) {
- // After initialization it's too late to change Heap constraints.
- DCHECK(!isolate->IsInitialized());
- bool result = isolate->heap()->ConfigureHeap(semi_space_size,
- old_space_size,
- max_executable_size,
- code_range_size);
- if (!result) return false;
+ isolate->heap()->ConfigureHeap(semi_space_size, old_space_size,
+ max_executable_size, code_range_size);
}
- if (constraints->stack_limit() != NULL) {
-    uintptr_t limit = reinterpret_cast<uintptr_t>(constraints->stack_limit());
+ if (constraints.stack_limit() != NULL) {
+    uintptr_t limit = reinterpret_cast<uintptr_t>(constraints.stack_limit());
isolate->stack_guard()->SetStackLimit(limit);
}
- isolate->set_max_available_threads(constraints->max_available_threads());
- return true;
+ isolate->set_max_available_threads(constraints.max_available_threads());
}
@@ -746,7 +716,6 @@
// NeanderObject constructor. When you add one to the site calling the
// constructor you should check that you ensured the VM was not dead first.
NeanderObject::NeanderObject(v8::internal::Isolate* isolate, int size) {
- EnsureInitializedForIsolate(isolate, "v8::Nowhere");
ENTER_V8(isolate);
value_ = isolate->factory()->NewNeanderObject();
i::Handle<i::FixedArray> elements =
isolate->factory()->NewFixedArray(size);
@@ -939,7 +908,6 @@
v8::Handle<Signature> signature,
int length) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::FunctionTemplate::New()");
LOG_API(i_isolate, "FunctionTemplate::New");
ENTER_V8(i_isolate);
return FunctionTemplateNew(
@@ -951,7 +919,6 @@
Handle<FunctionTemplate> receiver, int
argc,
Handle<FunctionTemplate> argv[]) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::Signature::New()");
LOG_API(i_isolate, "Signature::New");
ENTER_V8(i_isolate);
i::Handle<i::Struct> struct_obj =
@@ -1102,7 +1069,6 @@
Local<TypeSwitch> TypeSwitch::New(int argc, Handle<FunctionTemplate>
types[]) {
i::Isolate* isolate = i::Isolate::Current();
- EnsureInitializedForIsolate(isolate, "v8::TypeSwitch::New()");
LOG_API(isolate, "TypeSwitch::New");
ENTER_V8(isolate);
i::Handle<i::FixedArray> vector =
isolate->factory()->NewFixedArray(argc);
@@ -1283,7 +1249,6 @@
Local<ObjectTemplate> ObjectTemplate::New(
i::Isolate* isolate,
v8::Handle<FunctionTemplate> constructor) {
- EnsureInitializedForIsolate(isolate, "v8::ObjectTemplate::New()");
LOG_API(isolate, "ObjectTemplate::New");
ENTER_V8(isolate);
i::Handle<i::Struct> struct_obj =
@@ -2400,7 +2365,6 @@
Local<Value> JSON::Parse(Local<String> json_string) {
i::Handle<i::String> string = Utils::OpenHandle(*json_string);
i::Isolate* isolate = string->GetIsolate();
- EnsureInitializedForIsolate(isolate, "v8::JSON::Parse");
ENTER_V8(isolate);
i::HandleScope scope(isolate);
i::Handle<i::String> source = i::String::Flatten(string);
@@ -4891,7 +4855,6 @@
bool v8::String::IsExternal() const {
i::Handle<i::String> str = Utils::OpenHandle(this);
-
EnsureInitializedForIsolate(str->GetIsolate(), "v8::String::IsExternal()");
return i::StringShape(*str).IsExternalTwoByte();
}
@@ -5080,11 +5043,8 @@
bool v8::V8::Initialize() {
- i::Isolate* isolate = i::Isolate::UncheckedCurrent();
- if (isolate != NULL && isolate->IsInitialized()) {
- return true;
- }
- return InitializeHelper(isolate);
+ i::V8::Initialize();
+ return true;
}
@@ -5097,38 +5057,6 @@
ReturnAddressLocationResolver return_address_resolver) {
i::V8::SetReturnAddressLocationResolver(return_address_resolver);
}
-
-
-bool v8::V8::SetFunctionEntryHook(Isolate* ext_isolate,
- FunctionEntryHook entry_hook) {
- DCHECK(ext_isolate != NULL);
- DCHECK(entry_hook != NULL);
-
- i::Isolate* isolate = reinterpret_cast<i::Isolate*>(ext_isolate);
-
- // The entry hook can only be set before the Isolate is initialized, as
- // otherwise the Isolate's code stubs generated at initialization won't
- // contain entry hooks.
- if (isolate->IsInitialized())
- return false;
-
- // Setting an entry hook is a one-way operation, once set, it cannot be
- // changed or unset.
- if (isolate->function_entry_hook() != NULL)
- return false;
-
- isolate->set_function_entry_hook(entry_hook);
- return true;
-}
-
-
-void v8::V8::SetJitCodeEventHandler(
- JitCodeEventOptions options, JitCodeEventHandler event_handler) {
- i::Isolate* isolate = i::Isolate::Current();
- // Ensure that logging is initialized for our isolate.
- isolate->InitializeLoggingAndCounters();
- isolate->logger()->SetCodeEventHandler(options, event_handler);
-}
void v8::V8::SetArrayBufferAllocator(
ArrayBuffer::Allocator* allocator) {
@@ -5279,7 +5207,6 @@
v8::Handle<ObjectTemplate> global_template,
v8::Handle<Value> global_object) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(external_isolate);
- EnsureInitializedForIsolate(isolate, "v8::Context::New()");
LOG_API(isolate, "Context::New");
ON_BAILOUT(isolate, "v8::Context::New()", return Local<Context>());
i::HandleScope scope(isolate);
@@ -5410,7 +5337,6 @@
Local<External> v8::External::New(Isolate* isolate, void* value) {
STATIC_ASSERT(sizeof(value) == sizeof(i::Address));
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::External::New()");
LOG_API(i_isolate, "External::New");
ENTER_V8(i_isolate);
i::Handle<i::JSObject> external =
i_isolate->factory()->NewExternal(value);
@@ -5485,7 +5411,6 @@
String::NewStringType type,
int length) {
i::Isolate* isolate = reinterpret_cast<internal::Isolate*>(v8_isolate);
- EnsureInitializedForIsolate(isolate, location);
LOG_API(isolate, env);
if (length == 0 && type != String::kUndetectableString) {
return String::Empty(v8_isolate);
@@ -5548,7 +5473,6 @@
Local<String> v8::String::Concat(Handle<String> left, Handle<String>
right) {
i::Handle<i::String> left_string = Utils::OpenHandle(*left);
i::Isolate* isolate = left_string->GetIsolate();
- EnsureInitializedForIsolate(isolate, "v8::String::New()");
LOG_API(isolate, "String::New(char)");
ENTER_V8(isolate);
i::Handle<i::String> right_string = Utils::OpenHandle(*right);
@@ -5581,7 +5505,6 @@
Isolate* isolate,
v8::String::ExternalStringResource* resource) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::String::NewExternal()");
LOG_API(i_isolate, "String::NewExternal");
ENTER_V8(i_isolate);
CHECK(resource && resource->data());
@@ -5620,7 +5543,6 @@
Local<String> v8::String::NewExternal(
Isolate* isolate, v8::String::ExternalOneByteStringResource* resource)
{
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::String::NewExternal()");
LOG_API(i_isolate, "String::NewExternal");
ENTER_V8(i_isolate);
CHECK(resource && resource->data());
@@ -5673,7 +5595,6 @@
Local<v8::Object> v8::Object::New(Isolate* isolate) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::Object::New()");
LOG_API(i_isolate, "Object::New");
ENTER_V8(i_isolate);
i::Handle<i::JSObject> obj =
@@ -5684,7 +5605,6 @@
Local<v8::Value> v8::NumberObject::New(Isolate* isolate, double value) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::NumberObject::New()");
LOG_API(i_isolate, "NumberObject::New");
ENTER_V8(i_isolate);
i::Handle<i::Object> number = i_isolate->factory()->NewNumber(value);
@@ -5705,7 +5625,6 @@
Local<v8::Value> v8::BooleanObject::New(bool value) {
i::Isolate* isolate = i::Isolate::Current();
- EnsureInitializedForIsolate(isolate, "v8::BooleanObject::New()");
LOG_API(isolate, "BooleanObject::New");
ENTER_V8(isolate);
i::Handle<i::Object> boolean(value
@@ -5730,7 +5649,6 @@
Local<v8::Value> v8::StringObject::New(Handle<String> value) {
i::Handle<i::String> string = Utils::OpenHandle(*value);
i::Isolate* isolate = string->GetIsolate();
- EnsureInitializedForIsolate(isolate, "v8::StringObject::New()");
LOG_API(isolate, "StringObject::New");
ENTER_V8(isolate);
i::Handle<i::Object> obj =
@@ -5751,7 +5669,6 @@
Local<v8::Value> v8::SymbolObject::New(Isolate* isolate, Handle<Symbol>
value) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::SymbolObject::New()");
LOG_API(i_isolate, "SymbolObject::New");
ENTER_V8(i_isolate);
i::Handle<i::Object> obj = i::Object::ToObject(
@@ -5772,7 +5689,6 @@
Local<v8::Value> v8::Date::New(Isolate* isolate, double time) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::Date::New()");
LOG_API(i_isolate, "Date::New");
if (std::isnan(time)) {
// Introduce only canonical NaN value into the VM, to avoid signaling
NaNs.
@@ -5838,7 +5754,6 @@
Local<v8::RegExp> v8::RegExp::New(Handle<String> pattern,
Flags flags) {
i::Isolate* isolate = Utils::OpenHandle(*pattern)->GetIsolate();
- EnsureInitializedForIsolate(isolate, "v8::RegExp::New()");
LOG_API(isolate, "RegExp::New");
ENTER_V8(isolate);
EXCEPTION_PREAMBLE(isolate);
@@ -5875,7 +5790,6 @@
Local<v8::Array> v8::Array::New(Isolate* isolate, int length) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::Array::New()");
LOG_API(i_isolate, "Array::New");
ENTER_V8(i_isolate);
int real_length = length > 0 ? length : 0;
@@ -6095,7 +6009,6 @@
Local<ArrayBuffer> v8::ArrayBuffer::New(Isolate* isolate, size_t
byte_length) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::ArrayBuffer::New(size_t)");
LOG_API(i_isolate, "v8::ArrayBuffer::New(size_t)");
ENTER_V8(i_isolate);
i::Handle<i::JSArrayBuffer> obj =
@@ -6108,7 +6021,6 @@
Local<ArrayBuffer> v8::ArrayBuffer::New(Isolate* isolate, void* data,
size_t byte_length) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::ArrayBuffer::New(void*,
size_t)");
LOG_API(i_isolate, "v8::ArrayBuffer::New(void*, size_t)");
ENTER_V8(i_isolate);
i::Handle<i::JSArrayBuffer> obj =
@@ -6211,8 +6123,6 @@
Local<Type##Array> Type##Array::New(Handle<ArrayBuffer>
array_buffer, \
size_t byte_offset, size_t length)
{ \
i::Isolate* isolate =
Utils::OpenHandle(*array_buffer)->GetIsolate(); \
-
EnsureInitializedForIsolate(isolate, \
- "v8::" #Type "Array::New(Handle<ArrayBuffer>, size_t,
size_t)"); \
LOG_API(isolate, \
"v8::" #Type "Array::New(Handle<ArrayBuffer>, size_t,
size_t)"); \
ENTER_V8(isolate); \
@@ -6236,8 +6146,6 @@
size_t byte_offset, size_t byte_length) {
i::Handle<i::JSArrayBuffer> buffer = Utils::OpenHandle(*array_buffer);
i::Isolate* isolate = buffer->GetIsolate();
- EnsureInitializedForIsolate(
- isolate, "v8::DataView::New(void*, size_t, size_t)");
LOG_API(isolate, "v8::DataView::New(void*, size_t, size_t)");
ENTER_V8(isolate);
i::Handle<i::JSDataView> obj = isolate->factory()->NewJSDataView();
@@ -6249,7 +6157,6 @@
Local<Symbol> v8::Symbol::New(Isolate* isolate, Local<String> name) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::Symbol::New()");
LOG_API(i_isolate, "Symbol::New()");
ENTER_V8(i_isolate);
i::Handle<i::Symbol> result = i_isolate->factory()->NewSymbol();
@@ -6314,7 +6221,6 @@
Local<Private> v8::Private::New(Isolate* isolate, Local<String> name) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
- EnsureInitializedForIsolate(i_isolate, "v8::Private::New()");
LOG_API(i_isolate, "Private::New()");
ENTER_V8(i_isolate);
i::Handle<i::Symbol> symbol = i_isolate->factory()->NewPrivateSymbol();
@@ -6386,7 +6292,6 @@
bool V8::AddMessageListener(MessageCallback that, Handle<Value> data) {
i::Isolate* isolate = i::Isolate::Current();
- EnsureInitializedForIsolate(isolate, "v8::V8::AddMessageListener()");
ON_BAILOUT(isolate, "v8::V8::AddMessageListener()", return false);
ENTER_V8(isolate);
i::HandleScope scope(isolate);
@@ -6402,7 +6307,6 @@
void V8::RemoveMessageListeners(MessageCallback that) {
i::Isolate* isolate = i::Isolate::Current();
- EnsureInitializedForIsolate(isolate, "v8::V8::RemoveMessageListener()");
ON_BAILOUT(isolate, "v8::V8::RemoveMessageListeners()", return);
ENTER_V8(isolate);
i::HandleScope scope(isolate);
@@ -6663,6 +6567,9 @@
Isolate* Isolate::New(const Isolate::CreateParams& params) {
+ // TODO(jochen): Remove again soon.
+ V8::Initialize();
+
i::Isolate* isolate = new i::Isolate();
Isolate* v8_isolate = reinterpret_cast<Isolate*>(isolate);
if (params.entry_hook) {
@@ -6673,8 +6580,17 @@
isolate->logger()->SetCodeEventHandler(kJitCodeEventDefault,
params.code_event_handler);
}
- SetResourceConstraints(v8_isolate,
-
const_cast<ResourceConstraints*>(¶ms.constraints));
+ SetResourceConstraints(isolate, params.constraints);
+ if (params.enable_serializer) {
+ isolate->enable_serializer();
+ }
+ // TODO(jochen): Once we got rid of Isolate::Current(), we can remove
this.
+ Isolate::Scope isolate_scope(v8_isolate);
+ if (params.entry_hook || !i::Snapshot::Initialize(isolate)) {
+ // If the isolate has a function entry hook, it needs to re-build all
its
+ // code stubs with entry hooks embedded, so don't deserialize a
snapshot.
+ isolate->Init(NULL);
+ }
return v8_isolate;
}
@@ -6977,7 +6893,6 @@
bool Debug::SetDebugEventListener(EventCallback that, Handle<Value> data) {
i::Isolate* isolate = i::Isolate::Current();
-
EnsureInitializedForIsolate(isolate, "v8::Debug::SetDebugEventListener()");
ON_BAILOUT(isolate, "v8::Debug::SetDebugEventListener()", return false);
ENTER_V8(isolate);
i::HandleScope scope(isolate);
@@ -7016,7 +6931,6 @@
void Debug::SetMessageHandler(v8::Debug::MessageHandler handler) {
i::Isolate* isolate = i::Isolate::Current();
- EnsureInitializedForIsolate(isolate, "v8::Debug::SetMessageHandler");
ENTER_V8(isolate);
isolate->debug()->SetMessageHandler(handler);
}
@@ -7090,7 +7004,6 @@
Local<Context> Debug::GetDebugContext() {
i::Isolate* isolate = i::Isolate::Current();
- EnsureInitializedForIsolate(isolate, "v8::Debug::GetDebugContext()");
ENTER_V8(isolate);
return Utils::ToLocal(i::Isolate::Current()->debug()->GetDebugContext());
}
=======================================
--- /trunk/src/arm/full-codegen-arm.cc Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/arm/full-codegen-arm.cc Fri Sep 19 18:04:45 2014 UTC
@@ -1353,6 +1353,25 @@
Comment cmnt(masm_, "[ VariableProxy");
EmitVariableLoad(expr);
}
+
+
+void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
+ Comment cnmt(masm_, "[ SuperReference ");
+
+ __ ldr(LoadDescriptor::ReceiverRegister(),
+ MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+
+ Handle<Symbol>
home_object_symbol(isolate()->heap()->home_object_symbol());
+ __ Move(LoadDescriptor::NameRegister(), home_object_symbol);
+
+ CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
+
+ __ cmp(r0, Operand(isolate()->factory()->undefined_value()));
+ Label done;
+ __ b(ne, &done);
+ __ CallRuntime(Runtime::kThrowNonMethodError, 0);
+ __ bind(&done);
+}
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
@@ -2300,6 +2319,7 @@
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Literal* key = prop->key()->AsLiteral();
+
__ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
if (FLAG_vector_ics) {
__ mov(VectorLoadICDescriptor::SlotRegister(),
@@ -2311,6 +2331,21 @@
}
+void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
+ SetSourcePosition(prop->position());
+ Literal* key = prop->key()->AsLiteral();
+ DCHECK(!key->value()->IsSmi());
+ DCHECK(prop->IsSuperAccess());
+
+ SuperReference* super_ref = prop->obj()->AsSuperReference();
+ EmitLoadHomeObject(super_ref);
+ __ Push(r0);
+ VisitForStackValue(super_ref->this_var());
+ __ Push(key->value());
+ __ CallRuntime(Runtime::kLoadFromSuper, 3);
+}
+
+
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
@@ -2598,9 +2633,13 @@
Expression* key = expr->key();
if (key->IsPropertyName()) {
- VisitForAccumulatorValue(expr->obj());
- __ Move(LoadDescriptor::ReceiverRegister(), r0);
- EmitNamedPropertyLoad(expr);
+ if (!expr->IsSuperAccess()) {
+ VisitForAccumulatorValue(expr->obj());
+ __ Move(LoadDescriptor::ReceiverRegister(), r0);
+ EmitNamedPropertyLoad(expr);
+ } else {
+ EmitNamedSuperPropertyLoad(expr);
+ }
PrepareForBailoutForId(expr->LoadId(), TOS_REG);
context()->Plug(r0);
} else {
@@ -2643,6 +2682,7 @@
} else {
// Load the function from the receiver.
DCHECK(callee->IsProperty());
+ DCHECK(!callee->AsProperty()->IsSuperAccess());
__ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
EmitNamedPropertyLoad(callee->AsProperty());
PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
@@ -2655,6 +2695,45 @@
EmitCall(expr, call_type);
}
+
+void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
+ Expression* callee = expr->expression();
+ DCHECK(callee->IsProperty());
+ Property* prop = callee->AsProperty();
+ DCHECK(prop->IsSuperAccess());
+
+ SetSourcePosition(prop->position());
+ Literal* key = prop->key()->AsLiteral();
+ DCHECK(!key->value()->IsSmi());
+ // Load the function from the receiver.
+ const Register scratch = r1;
+ SuperReference* super_ref = prop->obj()->AsSuperReference();
+ EmitLoadHomeObject(super_ref);
+ __ Push(r0);
+ VisitForAccumulatorValue(super_ref->this_var());
+ __ Push(r0);
+ __ ldr(scratch, MemOperand(sp, kPointerSize));
+ __ Push(scratch);
+ __ Push(r0);
+ __ Push(key->value());
+
+ // Stack here:
+ // - home_object
+ // - this (receiver)
+ // - home_object <-- LoadFromSuper will pop here and below.
+ // - this (receiver)
+ // - key
+ __ CallRuntime(Runtime::kLoadFromSuper, 3);
+
+ // Replace home_object with target function.
+ __ str(r0, MemOperand(sp, kPointerSize));
+
+ // Stack here:
+ // - target function
+ // - this (receiver)
+ EmitCall(expr, CallICState::METHOD);
+}
+
// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
@@ -2825,13 +2904,20 @@
EmitCall(expr);
} else if (call_type == Call::PROPERTY_CALL) {
Property* property = callee->AsProperty();
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(property->obj());
- }
- if (property->key()->IsPropertyName()) {
- EmitCallWithLoadIC(expr);
+ bool is_named_call = property->key()->IsPropertyName();
+ // super.x() is handled in EmitCallWithLoadIC.
+ if (property->IsSuperAccess() && is_named_call) {
+ EmitSuperCallWithLoadIC(expr);
} else {
- EmitKeyedCallWithLoadIC(expr, property->key());
+ {
+ PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(property->obj());
+ }
+ if (is_named_call) {
+ EmitCallWithLoadIC(expr);
+ } else {
+ EmitKeyedCallWithLoadIC(expr, property->key());
+ }
}
} else {
DCHECK(call_type == Call::OTHER_CALL);
=======================================
--- /trunk/src/arm/lithium-codegen-arm.cc Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/arm/lithium-codegen-arm.cc Fri Sep 19 18:04:45 2014 UTC
@@ -52,11 +52,8 @@
// the frame (that is done in GeneratePrologue).
FrameScope frame_scope(masm_, StackFrame::NONE);
- return GeneratePrologue() &&
- GenerateBody() &&
- GenerateDeferredCode() &&
- GenerateDeoptJumpTable() &&
- GenerateSafepointTable();
+ return GeneratePrologue() && GenerateBody() && GenerateDeferredCode() &&
+ GenerateJumpTable() && GenerateSafepointTable();
}
@@ -313,7 +310,7 @@
}
-bool LCodeGen::GenerateDeoptJumpTable() {
+bool LCodeGen::GenerateJumpTable() {
// Check that the jump table is accessible from everywhere in the
function
// code, i.e. that offsets to the table can be encoded in the 24bit
signed
// immediate of a branch instruction.
@@ -336,21 +333,23 @@
int length = deopt_jump_table_.length();
for (int i = 0; i < length; i++) {
- __ bind(&deopt_jump_table_[i].label);
+ Deoptimizer::JumpTableEntry* table_entry = &deopt_jump_table_[i];
+ __ bind(&table_entry->label);
- Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type;
+ Deoptimizer::BailoutType type = table_entry->bailout_type;
DCHECK(type == deopt_jump_table_[0].bailout_type);
- Address entry = deopt_jump_table_[i].address;
+ Address entry = table_entry->address;
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
- DCHECK(id != Deoptimizer::kNotDeoptimizationEntry);
+ DCHECK_NE(Deoptimizer::kNotDeoptimizationEntry, id);
Comment(";;; jump table entry %d: deoptimization bailout %d.", i,
id);
+ DeoptComment(table_entry->mnemonic, table_entry->reason);
// Second-level deopt table entries are contiguous and small, so
instead
// of loading the full, absolute address of each one, load an
immediate
// offset which will be added to the base address later.
__ mov(entry_offset, Operand(entry - base));
- if (deopt_jump_table_[i].needs_frame) {
+ if (table_entry->needs_frame) {
DCHECK(!info()->saves_caller_doubles());
if (needs_frame.is_bound()) {
__ b(&needs_frame);
@@ -847,6 +846,7 @@
void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr,
+ const char* reason,
Deoptimizer::BailoutType bailout_type) {
LEnvironment* environment = instr->environment();
RegisterEnvironmentForDeoptimization(environment,
Safepoint::kNoLazyDeopt);
@@ -904,6 +904,7 @@
// restore caller doubles.
if (condition == al && frame_is_built_ &&
!info()->saves_caller_doubles()) {
+ DeoptComment(instr->Mnemonic(), reason);
__ Call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
// We often have several deopts to the same entry, reuse the last
@@ -912,9 +913,8 @@
(deopt_jump_table_.last().address != entry) ||
(deopt_jump_table_.last().bailout_type != bailout_type) ||
(deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
- Deoptimizer::JumpTableEntry table_entry(entry,
- bailout_type,
- !frame_is_built_);
+ Deoptimizer::JumpTableEntry table_entry(entry, instr->Mnemonic(),
reason,
+
bailout_type, !frame_is_built_);
deopt_jump_table_.Add(table_entry, zone());
}
__ b(condition, &deopt_jump_table_.last().label);
@@ -922,11 +922,12 @@
}
-void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr) {
+void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr,
+ const char* reason) {
Deoptimizer::BailoutType bailout_type = info()->IsStub()
? Deoptimizer::LAZY
: Deoptimizer::EAGER;
- DeoptimizeIf(condition, instr, bailout_type);
+ DeoptimizeIf(condition, instr, reason, bailout_type);
}
@@ -5665,8 +5666,7 @@
type = Deoptimizer::LAZY;
}
- Comment(";;; deoptimize: %s", instr->hydrogen()->reason());
- DeoptimizeIf(al, instr, type);
+ DeoptimizeIf(al, instr, instr->hydrogen()->reason(), type);
}
=======================================
--- /trunk/src/arm/lithium-codegen-arm.h Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/arm/lithium-codegen-arm.h Fri Sep 19 18:04:45 2014 UTC
@@ -172,7 +172,7 @@
void GenerateBodyInstructionPre(LInstruction* instr) OVERRIDE;
bool GeneratePrologue();
bool GenerateDeferredCode();
- bool GenerateDeoptJumpTable();
+ bool GenerateJumpTable();
bool GenerateSafepointTable();
// Generates the custom OSR entrypoint and sets the osr_pc_offset.
@@ -235,8 +235,9 @@
void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
Safepoint::DeoptMode mode);
void DeoptimizeIf(Condition condition, LInstruction* instr,
- Deoptimizer::BailoutType bailout_type);
- void DeoptimizeIf(Condition condition, LInstruction* instr);
+ const char* reason, Deoptimizer::BailoutType
bailout_type);
+ void DeoptimizeIf(Condition condition, LInstruction* instr,
+ const char* reason = NULL);
void AddToTranslation(LEnvironment* environment,
Translation* translation,
=======================================
--- /trunk/src/arm64/full-codegen-arm64.cc Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/arm64/full-codegen-arm64.cc Fri Sep 19 18:04:45 2014 UTC
@@ -1339,6 +1339,26 @@
Comment cmnt(masm_, "[ VariableProxy");
EmitVariableLoad(expr);
}
+
+
+void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
+ Comment cnmt(masm_, "[ SuperReference ");
+
+ __ ldr(LoadDescriptor::ReceiverRegister(),
+ MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+
+ Handle<Symbol>
home_object_symbol(isolate()->heap()->home_object_symbol());
+ __ Mov(LoadDescriptor::NameRegister(), Operand(home_object_symbol));
+
+ CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
+
+ __ Mov(x10, Operand(isolate()->factory()->undefined_value()));
+ __ cmp(x0, x10);
+ Label done;
+ __ b(&done, ne);
+ __ CallRuntime(Runtime::kThrowNonMethodError, 0);
+ __ bind(&done);
+}
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
@@ -1949,6 +1969,8 @@
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
Literal* key = prop->key()->AsLiteral();
+ DCHECK(!prop->IsSuperAccess());
+
__ Mov(LoadDescriptor::NameRegister(), Operand(key->value()));
if (FLAG_vector_ics) {
__ Mov(VectorLoadICDescriptor::SlotRegister(),
@@ -1958,6 +1980,21 @@
CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
}
}
+
+
+void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
+ SetSourcePosition(prop->position());
+ Literal* key = prop->key()->AsLiteral();
+ DCHECK(!key->value()->IsSmi());
+ DCHECK(prop->IsSuperAccess());
+
+ SuperReference* super_ref = prop->obj()->AsSuperReference();
+ EmitLoadHomeObject(super_ref);
+ __ Push(x0);
+ VisitForStackValue(super_ref->this_var());
+ __ Push(key->value());
+ __ CallRuntime(Runtime::kLoadFromSuper, 3);
+}
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
@@ -2263,9 +2300,13 @@
Expression* key = expr->key();
if (key->IsPropertyName()) {
- VisitForAccumulatorValue(expr->obj());
- __ Move(LoadDescriptor::ReceiverRegister(), x0);
- EmitNamedPropertyLoad(expr);
+ if (!expr->IsSuperAccess()) {
+ VisitForAccumulatorValue(expr->obj());
+ __ Move(LoadDescriptor::ReceiverRegister(), x0);
+ EmitNamedPropertyLoad(expr);
+ } else {
+ EmitNamedSuperPropertyLoad(expr);
+ }
PrepareForBailoutForId(expr->LoadId(), TOS_REG);
context()->Plug(x0);
} else {
@@ -2307,6 +2348,7 @@
} else {
// Load the function from the receiver.
DCHECK(callee->IsProperty());
+ DCHECK(!callee->AsProperty()->IsSuperAccess());
__ Peek(LoadDescriptor::ReceiverRegister(), 0);
EmitNamedPropertyLoad(callee->AsProperty());
PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
@@ -2318,6 +2360,45 @@
EmitCall(expr, call_type);
}
+
+void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
+ Expression* callee = expr->expression();
+ DCHECK(callee->IsProperty());
+ Property* prop = callee->AsProperty();
+ DCHECK(prop->IsSuperAccess());
+
+ SetSourcePosition(prop->position());
+ Literal* key = prop->key()->AsLiteral();
+ DCHECK(!key->value()->IsSmi());
+
+ // Load the function from the receiver.
+ const Register scratch = x10;
+ SuperReference* super_ref =
callee->AsProperty()->obj()->AsSuperReference();
+ EmitLoadHomeObject(super_ref);
+ __ Push(x0);
+ VisitForAccumulatorValue(super_ref->this_var());
+ __ Push(x0);
+ __ Peek(scratch, kPointerSize);
+ __ Push(scratch, x0);
+ __ Push(key->value());
+
+ // Stack here:
+ // - home_object
+ // - this (receiver)
+ // - home_object <-- LoadFromSuper will pop here and below.
+ // - this (receiver)
+ // - key
+ __ CallRuntime(Runtime::kLoadFromSuper, 3);
+
+ // Replace home_object with target function.
+ __ Poke(x0, kPointerSize);
+
+ // Stack here:
+ // - target function
+ // - this (receiver)
+ EmitCall(expr, CallICState::METHOD);
+}
+
// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
@@ -2491,15 +2572,21 @@
EmitCall(expr);
} else if (call_type == Call::PROPERTY_CALL) {
Property* property = callee->AsProperty();
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(property->obj());
- }
- if (property->key()->IsPropertyName()) {
- EmitCallWithLoadIC(expr);
+ bool is_named_call = property->key()->IsPropertyName();
+ // super.x() is handled in EmitCallWithLoadIC.
+ if (property->IsSuperAccess() && is_named_call) {
+ EmitSuperCallWithLoadIC(expr);
} else {
- EmitKeyedCallWithLoadIC(expr, property->key());
+ {
+ PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(property->obj());
+ }
+ if (is_named_call) {
+ EmitCallWithLoadIC(expr);
+ } else {
+ EmitKeyedCallWithLoadIC(expr, property->key());
+ }
}
-
} else {
DCHECK(call_type == Call::OTHER_CALL);
// Call to an arbitrary expression not handled specially above.
=======================================
--- /trunk/src/arm64/lithium-codegen-arm64.cc Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/arm64/lithium-codegen-arm64.cc Fri Sep 19 18:04:45 2014 UTC
@@ -593,11 +593,8 @@
// the frame (that is done in GeneratePrologue).
FrameScope frame_scope(masm_, StackFrame::NONE);
- return GeneratePrologue() &&
- GenerateBody() &&
- GenerateDeferredCode() &&
- GenerateDeoptJumpTable() &&
- GenerateSafepointTable();
+ return GeneratePrologue() && GenerateBody() && GenerateDeferredCode() &&
+ GenerateJumpTable() && GenerateSafepointTable();
}
@@ -827,28 +824,27 @@
}
-bool LCodeGen::GenerateDeoptJumpTable() {
+bool LCodeGen::GenerateJumpTable() {
Label needs_frame, restore_caller_doubles, call_deopt_entry;
- if (deopt_jump_table_.length() > 0) {
+ if (jump_table_.length() > 0) {
Comment(";;; -------------------- Jump table --------------------");
- Address base = deopt_jump_table_[0]->address;
+ Address base = jump_table_[0]->address;
UseScratchRegisterScope temps(masm());
Register entry_offset = temps.AcquireX();
- int length = deopt_jump_table_.length();
+ int length = jump_table_.length();
for (int i = 0; i < length; i++) {
- __ Bind(&deopt_jump_table_[i]->label);
+ Deoptimizer::JumpTableEntry* table_entry = jump_table_[i];
+ __ Bind(&table_entry->label);
- Deoptimizer::BailoutType type = deopt_jump_table_[i]->bailout_type;
- Address entry = deopt_jump_table_[i]->address;
+ Deoptimizer::BailoutType type = table_entry->bailout_type;
+ Address entry = table_entry->address;
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
- if (id == Deoptimizer::kNotDeoptimizationEntry) {
- Comment(";;; jump table entry %d.", i);
- } else {
- Comment(";;; jump table entry %d: deoptimization bailout %d.", i,
id);
- }
+ DCHECK_NE(Deoptimizer::kNotDeoptimizationEntry, id);
+ Comment(";;; jump table entry %d: deoptimization bailout %d.", i,
id);
+ DeoptComment(table_entry->mnemonic, table_entry->reason);
// Second-level deopt table entries are contiguous and small, so
instead
// of loading the full, absolute address of each one, load the base
@@ -859,7 +855,7 @@
// branch.
bool last_entry = (i + 1) == length;
- if (deopt_jump_table_[i]->needs_frame) {
+ if (table_entry->needs_frame) {
DCHECK(!info()->saves_caller_doubles());
if (!needs_frame.is_bound()) {
// This variant of deopt can only be used with stubs. Since we
don't
@@ -997,8 +993,8 @@
void LCodeGen::DeoptimizeBranch(
- LInstruction* instr, BranchType branch_type, Register reg, int bit,
- Deoptimizer::BailoutType* override_bailout_type) {
+ LInstruction* instr, const char* reason, BranchType branch_type,
+ Register reg, int bit, Deoptimizer::BailoutType*
override_bailout_type) {
LEnvironment* environment = instr->environment();
RegisterEnvironmentForDeoptimization(environment,
Safepoint::kNoLazyDeopt);
Deoptimizer::BailoutType bailout_type =
@@ -1052,91 +1048,98 @@
// Go through jump table if we need to build frame, or restore caller
doubles.
if (branch_type == always &&
frame_is_built_ && !info()->saves_caller_doubles()) {
+ DeoptComment(instr->Mnemonic(), reason);
__ Call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.
- if (deopt_jump_table_.is_empty() ||
- (deopt_jump_table_.last()->address != entry) ||
- (deopt_jump_table_.last()->bailout_type != bailout_type) ||
- (deopt_jump_table_.last()->needs_frame != !frame_is_built_)) {
+ if (jump_table_.is_empty() || (jump_table_.last()->address != entry) ||
+ (jump_table_.last()->bailout_type != bailout_type) ||
+ (jump_table_.last()->needs_frame != !frame_is_built_)) {
Deoptimizer::JumpTableEntry* table_entry =
- new(zone()) Deoptimizer::JumpTableEntry(entry,
- bailout_type,
- !frame_is_built_);
- deopt_jump_table_.Add(table_entry, zone());
+ new (zone()) Deoptimizer::JumpTableEntry(
+ entry, instr->Mnemonic(), reason,
bailout_type, !frame_is_built_);
+ jump_table_.Add(table_entry, zone());
}
- __ B(&deopt_jump_table_.last()->label,
- branch_type, reg, bit);
+ __ B(&jump_table_.last()->label, branch_type, reg, bit);
}
}
void LCodeGen::Deoptimize(LInstruction* instr,
- Deoptimizer::BailoutType* override_bailout_type)
{
- DeoptimizeBranch(instr, always, NoReg, -1, override_bailout_type);
+ Deoptimizer::BailoutType* override_bailout_type,
+ const char* reason) {
+ DeoptimizeBranch(instr, reason, always, NoReg, -1,
override_bailout_type);
}
-void LCodeGen::DeoptimizeIf(Condition cond, LInstruction* instr) {
- DeoptimizeBranch(instr, static_cast<BranchType>(cond));
+void LCodeGen::DeoptimizeIf(Condition cond, LInstruction* instr,
+ const char* reason) {
+ DeoptimizeBranch(instr, reason, static_cast<BranchType>(cond));
}
-void LCodeGen::DeoptimizeIfZero(Register rt, LInstruction* instr) {
- DeoptimizeBranch(instr, reg_zero, rt);
+void LCodeGen::DeoptimizeIfZero(Register rt, LInstruction* instr,
+ const char* reason) {
+ DeoptimizeBranch(instr, reason, reg_zero, rt);
}
-void LCodeGen::DeoptimizeIfNotZero(Register rt, LInstruction* instr) {
- DeoptimizeBranch(instr, reg_not_zero, rt);
+void LCodeGen::DeoptimizeIfNotZero(Register rt, LInstruction* instr,
+ const char* reason) {
+ DeoptimizeBranch(instr, reason, reg_not_zero, rt);
}
-void LCodeGen::DeoptimizeIfNegative(Register rt, LInstruction* instr) {
+void LCodeGen::DeoptimizeIfNegative(Register rt, LInstruction* instr,
+ const char* reason) {
int sign_bit = rt.Is64Bits() ? kXSignBit : kWSignBit;
- DeoptimizeIfBitSet(rt, sign_bit, instr);
+ DeoptimizeIfBitSet(rt, sign_bit, instr, reason);
}
-void LCodeGen::DeoptimizeIfSmi(Register rt, LInstruction* instr) {
- DeoptimizeIfBitClear(rt, MaskToBit(kSmiTagMask), instr);
+void LCodeGen::DeoptimizeIfSmi(Register rt, LInstruction* instr,
+ const char* reason) {
+ DeoptimizeIfBitClear(rt, MaskToBit(kSmiTagMask), instr, reason);
}
-void LCodeGen::DeoptimizeIfNotSmi(Register rt, LInstruction* instr) {
- DeoptimizeIfBitSet(rt, MaskToBit(kSmiTagMask), instr);
+void LCodeGen::DeoptimizeIfNotSmi(Register rt, LInstruction* instr,
+ const char* reason) {
+ DeoptimizeIfBitSet(rt, MaskToBit(kSmiTagMask), instr, reason);
}
void LCodeGen::DeoptimizeIfRoot(Register rt, Heap::RootListIndex index,
- LInstruction* instr) {
+ LInstruction* instr, const char* reason) {
__ CompareRoot(rt, index);
- DeoptimizeIf(eq, instr);
+ DeoptimizeIf(eq, instr, reason);
}
void LCodeGen::DeoptimizeIfNotRoot(Register rt, Heap::RootListIndex index,
- LInstruction* instr) {
+ LInstruction* instr, const char*
reason) {
__ CompareRoot(rt, index);
- DeoptimizeIf(ne, instr);
+ DeoptimizeIf(ne, instr, reason);
}
-void LCodeGen::DeoptimizeIfMinusZero(DoubleRegister input,
- LInstruction* instr) {
+void LCodeGen::DeoptimizeIfMinusZero(DoubleRegister input, LInstruction*
instr,
+ const char* reason) {
__ TestForMinusZero(input);
- DeoptimizeIf(vs, instr);
+ DeoptimizeIf(vs, instr, reason);
}
-void LCodeGen::DeoptimizeIfBitSet(Register rt, int bit, LInstruction*
instr) {
- DeoptimizeBranch(instr, reg_bit_set, rt, bit);
+void LCodeGen::DeoptimizeIfBitSet(Register rt, int bit, LInstruction*
instr,
+ const char* reason) {
+ DeoptimizeBranch(instr, reason, reg_bit_set, rt, bit);
}
-void LCodeGen::DeoptimizeIfBitClear(Register rt, int bit, LInstruction*
instr) {
- DeoptimizeBranch(instr, reg_bit_clear, rt, bit);
+void LCodeGen::DeoptimizeIfBitClear(Register rt, int bit, LInstruction*
instr,
+ const char* reason) {
+ DeoptimizeBranch(instr, reason, reg_bit_clear, rt, bit);
}
@@ -2685,8 +2688,7 @@
type = Deoptimizer::LAZY;
}
- Comment(";;; deoptimize: %s", instr->hydrogen()->reason());
- Deoptimize(instr, &type);
+ Deoptimize(instr, &type, instr->hydrogen()->reason());
}
=======================================
--- /trunk/src/arm64/lithium-codegen-arm64.h Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/arm64/lithium-codegen-arm64.h Fri Sep 19 18:04:45 2014 UTC
@@ -27,7 +27,7 @@
LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
: LCodeGenBase(chunk, assembler, info),
deoptimizations_(4, info->zone()),
- deopt_jump_table_(4, info->zone()),
+ jump_table_(4, info->zone()),
deoptimization_literals_(8, info->zone()),
inlined_function_count_(0),
scope_(info->scope()),
@@ -213,24 +213,35 @@
Register temp,
LOperand* index,
String::Encoding encoding);
- void DeoptimizeBranch(LInstruction* instr, BranchType branch_type,
- Register reg = NoReg, int bit = -1,
+ void DeoptimizeBranch(LInstruction* instr, const char* reason,
+ BranchType branch_type, Register reg = NoReg,
+ int bit = -1,
Deoptimizer::BailoutType* override_bailout_type =
NULL);
void Deoptimize(LInstruction* instr,
- Deoptimizer::BailoutType* override_bailout_type = NULL);
- void DeoptimizeIf(Condition cond, LInstruction* instr);
- void DeoptimizeIfZero(Register rt, LInstruction* instr);
- void DeoptimizeIfNotZero(Register rt, LInstruction* instr);
- void DeoptimizeIfNegative(Register rt, LInstruction* instr);
- void DeoptimizeIfSmi(Register rt, LInstruction* instr);
- void DeoptimizeIfNotSmi(Register rt, LInstruction* instr);
+ Deoptimizer::BailoutType* override_bailout_type = NULL,
+ const char* reason = NULL);
+ void DeoptimizeIf(Condition cond, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfZero(Register rt, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfNotZero(Register rt, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfNegative(Register rt, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfSmi(Register rt, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfNotSmi(Register rt, LInstruction* instr,
+ const char* reason = NULL);
void DeoptimizeIfRoot(Register rt, Heap::RootListIndex index,
- LInstruction* instr);
+ LInstruction* instr, const char* reason = NULL);
void DeoptimizeIfNotRoot(Register rt, Heap::RootListIndex index,
- LInstruction* instr);
- void DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr);
- void DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr);
- void DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr);
+ LInstruction* instr, const char* reason = NULL);
+ void DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr,
+ const char* reason = NULL);
MemOperand PrepareKeyedExternalArrayOperand(Register key,
Register base,
@@ -273,7 +284,7 @@
void GenerateBodyInstructionPre(LInstruction* instr) OVERRIDE;
bool GeneratePrologue();
bool GenerateDeferredCode();
- bool GenerateDeoptJumpTable();
+ bool GenerateJumpTable();
bool GenerateSafepointTable();
// Generates the custom OSR entrypoint and sets the osr_pc_offset.
@@ -338,7 +349,7 @@
void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE;
ZoneList<LEnvironment*> deoptimizations_;
- ZoneList<Deoptimizer::JumpTableEntry*> deopt_jump_table_;
+ ZoneList<Deoptimizer::JumpTableEntry*> jump_table_;
ZoneList<Handle<Object> > deoptimization_literals_;
int inlined_function_count_;
Scope* const scope_;
=======================================
--- /trunk/src/base/utils/random-number-generator.cc Tue Aug 5 00:05:55
2014 UTC
+++ /trunk/src/base/utils/random-number-generator.cc Fri Sep 19 18:04:45
2014 UTC
@@ -125,6 +125,7 @@
void RandomNumberGenerator::SetSeed(int64_t seed) {
+ initial_seed_ = seed;
seed_ = (seed ^ kMultiplier) & kMask;
}
=======================================
--- /trunk/src/base/utils/random-number-generator.h Tue Sep 2 12:59:15
2014 UTC
+++ /trunk/src/base/utils/random-number-generator.h Fri Sep 19 18:04:45
2014 UTC
@@ -73,6 +73,8 @@
// Override the current seed.
void SetSeed(int64_t seed);
+
+ int64_t initial_seed() const { return initial_seed_; }
private:
static const int64_t kMultiplier = V8_2PART_UINT64_C(0x5, deece66d);
@@ -81,6 +83,7 @@
int Next(int bits) WARN_UNUSED_RESULT;
+ int64_t initial_seed_;
int64_t seed_;
};
=======================================
--- /trunk/src/bootstrapper.cc Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/bootstrapper.cc Fri Sep 19 18:04:45 2014 UTC
@@ -203,6 +203,7 @@
// New context initialization. Used for creating a context from scratch.
void InitializeGlobal(Handle<GlobalObject> global_object,
Handle<JSFunction> empty_function);
+ void InitializeExperimentalGlobal();
// Installs the contents of the native .js files on the global objects.
// Used for creating a context from scratch.
void InstallNativeFunctions();
@@ -482,12 +483,14 @@
{ // --- O b j e c t ---
Handle<JSFunction> object_fun = factory->NewFunction(object_name);
+ int unused = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
+ int instance_size = JSObject::kHeaderSize + kPointerSize * unused;
Handle<Map> object_function_map =
- factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+ factory->NewMap(JS_OBJECT_TYPE, instance_size);
+ object_function_map->set_inobject_properties(unused);
JSFunction::SetInitialMap(object_fun, object_function_map,
isolate->factory()->null_value());
- object_function_map->set_unused_property_fields(
- JSObject::kInitialGlobalObjectUnusedPropertiesCount);
+ object_function_map->set_unused_property_fields(unused);
native_context()->set_object_function(*object_fun);
@@ -1152,11 +1155,12 @@
{ // Set up the iterator result object
STATIC_ASSERT(JSGeneratorObject::kResultPropertyCount == 2);
Handle<JSFunction>
object_function(native_context()->object_function());
- DCHECK(object_function->initial_map()->inobject_properties() == 0);
Handle<Map> iterator_result_map =
- Map::Create(object_function,
JSGeneratorObject::kResultPropertyCount);
- DCHECK(iterator_result_map->inobject_properties() ==
- JSGeneratorObject::kResultPropertyCount);
+ Map::Create(isolate, JSGeneratorObject::kResultPropertyCount);
+ DCHECK_EQ(JSGeneratorObject::kResultSize,
+ iterator_result_map->instance_size());
+ DCHECK_EQ(JSGeneratorObject::kResultPropertyCount,
+ iterator_result_map->inobject_properties());
Map::EnsureDescriptorSlack(iterator_result_map,
JSGeneratorObject::kResultPropertyCount);
@@ -1171,6 +1175,8 @@
iterator_result_map->AppendDescriptor(&done_descr);
iterator_result_map->set_unused_property_fields(0);
+ iterator_result_map->set_pre_allocated_property_fields(
+ JSGeneratorObject::kResultPropertyCount);
DCHECK_EQ(JSGeneratorObject::kResultSize,
iterator_result_map->instance_size());
native_context()->set_iterator_result_map(*iterator_result_map);
@@ -1349,6 +1355,20 @@
ElementsKind external_kind =
GetNextTransitionElementsKind(elements_kind);
*external_map = Map::AsElementsKind(initial_map, external_kind);
}
+
+
+void Genesis::InitializeExperimentalGlobal() {
+ // TODO(erikcorry): Move this into Genesis::InitializeGlobal once we no
+ // longer need to live behind a flag.
+ Handle<JSObject> builtins(native_context()->builtins());
+
+ Handle<HeapObject> flag(
+ FLAG_harmony_regexps ? heap()->true_value() : heap()->false_value());
+ PropertyAttributes attributes =
+ static_cast<PropertyAttributes>(DONT_DELETE | READ_ONLY);
+ Runtime::DefineObjectProperty(builtins,
factory()->harmony_regexps_string(),
+ flag, attributes).Assert();
+}
bool Genesis::CompileBuiltin(Isolate* isolate, int index) {
@@ -1908,8 +1928,7 @@
*strict_generator_function_map);
Handle<JSFunction>
object_function(native_context()->object_function());
- Handle<Map> generator_object_prototype_map =
- Map::Create(object_function, 0);
+ Handle<Map> generator_object_prototype_map = Map::Create(isolate(), 0);
generator_object_prototype_map->set_prototype(*generator_object_prototype);
native_context()->set_generator_object_prototype_map(
*generator_object_prototype_map);
@@ -2594,9 +2613,6 @@
active_(isolate->bootstrapper()) {
NoTrackDoubleFieldsForSerializerScope disable_scope(isolate);
result_ = Handle<Context>::null();
- // If V8 cannot be initialized, just return.
- if (!V8::Initialize(NULL)) return;
-
// Before creating the roots we must save the context and restore it
// on all function exits.
SaveContext saved_context(isolate);
@@ -2651,6 +2667,7 @@
// Install experimental natives.
if (!InstallExperimentalNatives()) return;
+ InitializeExperimentalGlobal();
// We can't (de-)serialize typed arrays currently, but we are lucky: The
state
// of the random number generator needs no initialization during snapshot
=======================================
--- /trunk/src/compilation-cache.cc Tue Aug 5 00:05:55 2014 UTC
+++ /trunk/src/compilation-cache.cc Fri Sep 19 18:04:45 2014 UTC
@@ -344,7 +344,7 @@
MaybeHandle<FixedArray> CompilationCache::LookupRegExp(Handle<String>
source,
- JSRegExp::Flags flags) {
+ JSRegExp::Flags
flags) {
if (!IsEnabled()) return MaybeHandle<FixedArray>();
return reg_exp_.Lookup(source, flags);
=======================================
--- /trunk/src/compiler/ia32/code-generator-ia32.cc Tue Sep 9 00:05:04
2014 UTC
+++ /trunk/src/compiler/ia32/code-generator-ia32.cc Fri Sep 19 18:04:45
2014 UTC
@@ -303,8 +303,7 @@
case kSSEFloat64ToUint32: {
XMMRegister scratch = xmm0;
__ Move(scratch, -2147483648.0);
- // TODO(turbofan): IA32 SSE subsd() should take an operand.
- __ addsd(scratch, i.InputDoubleRegister(0));
+ __ addsd(scratch, i.InputOperand(0));
__ cvttsd2si(i.OutputRegister(), scratch);
__ add(i.OutputRegister(), Immediate(0x80000000));
break;
=======================================
--- /trunk/src/compiler/ia32/instruction-selector-ia32.cc Fri Sep 12
00:05:16 2014 UTC
+++ /trunk/src/compiler/ia32/instruction-selector-ia32.cc Fri Sep 19
18:04:45 2014 UTC
@@ -376,9 +376,7 @@
void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
IA32OperandGenerator g(this);
- // TODO(turbofan): IA32 SSE subsd() should take an operand.
- Emit(kSSEFloat64ToUint32, g.DefineAsRegister(node),
- g.UseRegister(node->InputAt(0)));
+ Emit(kSSEFloat64ToUint32, g.DefineAsRegister(node),
g.Use(node->InputAt(0)));
}
=======================================
--- /trunk/src/compiler/js-typed-lowering.cc Thu Sep 18 00:05:06 2014 UTC
+++ /trunk/src/compiler/js-typed-lowering.cc Fri Sep 19 18:04:45 2014 UTC
@@ -4,6 +4,7 @@
#include "src/compiler/access-builder.h"
#include "src/compiler/graph-inl.h"
+#include "src/compiler/js-builtin-reducer.h"
#include "src/compiler/js-typed-lowering.h"
#include "src/compiler/node-aux-data-inl.h"
#include "src/compiler/node-properties-inl.h"
@@ -678,6 +679,8 @@
return ReduceJSLoadProperty(node);
case IrOpcode::kJSStoreProperty:
return ReduceJSStoreProperty(node);
+ case IrOpcode::kJSCallFunction:
+ return JSBuiltinReducer(jsgraph()).Reduce(node);
default:
break;
}
=======================================
--- /trunk/src/compiler/js-typed-lowering.h Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/compiler/js-typed-lowering.h Fri Sep 19 18:04:45 2014 UTC
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef V8_COMPILER_OPERATOR_REDUCERS_H_
-#define V8_COMPILER_OPERATOR_REDUCERS_H_
+#ifndef V8_COMPILER_JS_TYPED_LOWERING_H_
+#define V8_COMPILER_JS_TYPED_LOWERING_H_
#include "src/compiler/graph-reducer.h"
#include "src/compiler/js-graph.h"
@@ -61,4 +61,4 @@
} // namespace internal
} // namespace v8
-#endif // V8_COMPILER_OPERATOR_REDUCERS_H_
+#endif // V8_COMPILER_JS_TYPED_LOWERING_H_
=======================================
--- /trunk/src/compiler/pipeline.cc Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/compiler/pipeline.cc Fri Sep 19 18:04:45 2014 UTC
@@ -151,6 +151,17 @@
Handle<Code> Pipeline::GenerateCode() {
+ if (info()->function()->dont_optimize_reason() == kTryCatchStatement ||
+ info()->function()->dont_optimize_reason() == kTryFinallyStatement ||
+ // TODO(turbofan): Make ES6 for-of work and remove this bailout.
+ info()->function()->dont_optimize_reason() == kForOfStatement ||
+ // TODO(turbofan): Make super work and remove this bailout.
+ info()->function()->dont_optimize_reason() == kSuperReference ||
+ // TODO(turbofan): Make OSR work and remove this bailout.
+ info()->is_osr()) {
+ return Handle<Code>::null();
+ }
+
if (FLAG_turbo_stats) isolate()->GetTStatistics()->Initialize(info_);
if (FLAG_trace_turbo) {
=======================================
--- /trunk/src/compiler/representation-change.h Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/compiler/representation-change.h Fri Sep 19 18:04:45 2014 UTC
@@ -53,6 +53,8 @@
return GetTaggedRepresentationFor(node, output_type);
} else if (use_type & kRepFloat64) {
return GetFloat64RepresentationFor(node, output_type);
+ } else if (use_type & kRepFloat32) {
+ return TypeError(node, output_type, use_type); // TODO(titzer):
handle
} else if (use_type & kRepBit) {
return GetBitRepresentationFor(node, output_type);
} else if (use_type & rWord) {
=======================================
--- /trunk/src/compiler/simplified-lowering.cc Thu Sep 18 00:05:06 2014 UTC
+++ /trunk/src/compiler/simplified-lowering.cc Fri Sep 19 18:04:45 2014 UTC
@@ -348,6 +348,15 @@
const Operator* Float64Op(Node* node) {
return changer_->Float64OperatorFor(node->opcode());
}
+
+ static MachineType AssumeImplicitFloat32Change(MachineType type) {
+ // TODO(titzer): Assume loads of float32 change representation to
float64.
+ // Fix this with full support for float32 representations.
+ if (type & kRepFloat32) {
+ return static_cast<MachineType>((type & ~kRepFloat32) | kRepFloat64);
+ }
+ return type;
+ }
// Dispatching routine for visiting the node {node} with the usage {use}.
// Depending on the operator, propagate new usage info to the inputs.
@@ -570,14 +579,14 @@
FieldAccess access = FieldAccessOf(node->op());
ProcessInput(node, 0, changer_->TypeForBasePointer(access));
ProcessRemainingInputs(node, 1);
- SetOutput(node, access.machine_type);
+ SetOutput(node, AssumeImplicitFloat32Change(access.machine_type));
if (lower()) lowering->DoLoadField(node);
break;
}
case IrOpcode::kStoreField: {
FieldAccess access = FieldAccessOf(node->op());
ProcessInput(node, 0, changer_->TypeForBasePointer(access));
- ProcessInput(node, 1, access.machine_type);
+ ProcessInput(node, 1,
AssumeImplicitFloat32Change(access.machine_type));
ProcessRemainingInputs(node, 2);
SetOutput(node, 0);
if (lower()) lowering->DoStoreField(node);
@@ -588,7 +597,7 @@
ProcessInput(node, 0, changer_->TypeForBasePointer(access));
ProcessInput(node, 1, kMachInt32); // element index
ProcessRemainingInputs(node, 2);
- SetOutput(node, access.machine_type);
+ SetOutput(node, AssumeImplicitFloat32Change(access.machine_type));
if (lower()) lowering->DoLoadElement(node);
break;
}
@@ -596,7 +605,7 @@
ElementAccess access = ElementAccessOf(node->op());
ProcessInput(node, 0, changer_->TypeForBasePointer(access));
ProcessInput(node, 1, kMachInt32); // element index
- ProcessInput(node, 2, access.machine_type);
+ ProcessInput(node, 2,
AssumeImplicitFloat32Change(access.machine_type));
ProcessRemainingInputs(node, 3);
SetOutput(node, 0);
if (lower()) lowering->DoStoreElement(node);
=======================================
--- /trunk/src/compiler/x64/code-generator-x64.cc Tue Sep 9 00:05:04 2014
UTC
+++ /trunk/src/compiler/x64/code-generator-x64.cc Fri Sep 19 18:04:45 2014
UTC
@@ -469,8 +469,12 @@
break;
}
case kSSEFloat64ToUint32: {
- // TODO(turbofan): X64 SSE cvttsd2siq should support operands.
- __ cvttsd2siq(i.OutputRegister(), i.InputDoubleRegister(0));
+ RegisterOrOperand input = i.InputRegisterOrOperand(0);
+ if (input.type == kDoubleRegister) {
+ __ cvttsd2siq(i.OutputRegister(), input.double_reg);
+ } else {
+ __ cvttsd2siq(i.OutputRegister(), input.operand);
+ }
__ andl(i.OutputRegister(), i.OutputRegister()); // clear upper
bits.
// TODO(turbofan): generated code should not look at the upper 32
bits
// of the result, but those bits could escape to the outside world.
=======================================
--- /trunk/src/compiler/x64/instruction-selector-x64.cc Fri Sep 12 00:05:16
2014 UTC
+++ /trunk/src/compiler/x64/instruction-selector-x64.cc Fri Sep 19 18:04:45
2014 UTC
@@ -500,9 +500,7 @@
void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
X64OperandGenerator g(this);
- // TODO(turbofan): X64 SSE cvttsd2siq should support operands.
- Emit(kSSEFloat64ToUint32, g.DefineAsRegister(node),
- g.UseRegister(node->InputAt(0)));
+ Emit(kSSEFloat64ToUint32, g.DefineAsRegister(node),
g.Use(node->InputAt(0)));
}
=======================================
--- /trunk/src/compiler.cc Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/compiler.cc Fri Sep 19 18:04:45 2014 UTC
@@ -396,14 +396,7 @@
DCHECK(info()->shared_info()->has_deoptimization_support());
// Check the whitelist for TurboFan.
- if (info()->closure()->PassesFilter(FLAG_turbo_filter) &&
- // TODO(turbofan): Make try-catch work and remove this bailout.
- info()->function()->dont_optimize_reason() != kTryCatchStatement &&
- info()->function()->dont_optimize_reason() != kTryFinallyStatement &&
- // TODO(turbofan): Make ES6 for-of work and remove this bailout.
- info()->function()->dont_optimize_reason() != kForOfStatement &&
- // TODO(turbofan): Make OSR work and remove this bailout.
- !info()->is_osr()) {
+ if (info()->closure()->PassesFilter(FLAG_turbo_filter)) {
compiler::Pipeline pipeline(info());
pipeline.GenerateCode();
if (!info()->code().is_null()) {
@@ -623,6 +616,7 @@
function_info->set_bailout_reason(lit->dont_optimize_reason());
function_info->set_dont_cache(lit->flags()->Contains(kDontCache));
function_info->set_kind(lit->kind());
+ function_info->set_asm_function(lit->scope()->asm_function());
}
@@ -729,6 +723,38 @@
MaybeHandle<Code> Compiler::GetLazyCode(Handle<JSFunction> function) {
DCHECK(!function->GetIsolate()->has_pending_exception());
DCHECK(!function->is_compiled());
+
+ if (FLAG_turbo_asm && function->shared()->asm_function()) {
+ CompilationInfoWithZone info(function);
+
+ VMState<COMPILER> state(info.isolate());
+ PostponeInterruptsScope postpone(info.isolate());
+
+ if (FLAG_trace_opt) {
+ // TODO(titzer): record and report full stats here.
+ PrintF("[optimizing asm ");
+ function->ShortPrint();
+ PrintF("]\n");
+ }
+
+ if (!Parser::Parse(&info)) return MaybeHandle<Code>();
+ if (!Rewriter::Rewrite(&info)) return MaybeHandle<Code>();
+ if (!Scope::Analyze(&info)) return MaybeHandle<Code>();
+ if (FLAG_turbo_deoptimization && !EnsureDeoptimizationSupport(&info)) {
+ return MaybeHandle<Code>();
+ }
+
+ info.SetOptimizing(BailoutId::None(),
+ Handle<Code>(function->shared()->code()));
+
+ info.MarkAsContextSpecializing();
+ info.MarkAsTypingEnabled();
+ info.MarkAsInliningDisabled();
+ compiler::Pipeline pipeline(&info);
+ pipeline.GenerateCode();
+ if (!info.code().is_null()) return info.code();
+ }
+
if (function->shared()->is_compiled()) {
return Handle<Code>(function->shared()->code());
}
=======================================
--- /trunk/src/compiler.h Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/compiler.h Fri Sep 19 18:04:45 2014 UTC
@@ -196,6 +196,8 @@
bool is_context_specializing() const { return
GetFlag(kContextSpecializing); }
void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }
+
+ void MarkAsInliningDisabled() { SetFlag(kInliningEnabled, false); }
bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }
=======================================
--- /trunk/src/counters.h Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/counters.h Fri Sep 19 18:04:45 2014 UTC
@@ -295,7 +295,7 @@
/* Generic range histograms
*/ \
HR(gc_idle_time_allotted_in_ms, V8.GCIdleTimeAllottedInMS, 0, 10000,
101) \
HR(gc_idle_time_limit_overshot, V8.GCIdleTimeLimit.Overshot, 0, 10000,
101) \
- HR(gc_idle_time_limit_undershot, V8.GCIdleTimeLimi.Undershot, 0, 10000,
101)
+ HR(gc_idle_time_limit_undershot, V8.GCIdleTimeLimit.Undershot, 0, 10000,
101)
#define HISTOGRAM_TIMER_LIST(HT) \
/* Garbage collection timers. */ \
=======================================
--- /trunk/src/d8.cc Thu Sep 18 00:05:06 2014 UTC
+++ /trunk/src/d8.cc Fri Sep 19 18:04:45 2014 UTC
@@ -1619,6 +1619,7 @@
v8::V8::InitializeICU(options.icu_data_file);
v8::Platform* platform = v8::platform::CreateDefaultPlatform();
v8::V8::InitializePlatform(platform);
+ v8::V8::Initialize();
#ifdef V8_USE_EXTERNAL_STARTUP_DATA
StartupDataHandler startup_data(options.natives_blob,
options.snapshot_blob);
#endif
=======================================
--- /trunk/src/deoptimizer.h Tue Sep 16 07:50:38 2014 UTC
+++ /trunk/src/deoptimizer.h Fri Sep 19 18:04:45 2014 UTC
@@ -102,15 +102,19 @@
static const int kBailoutTypesWithCodeEntry = SOFT + 1;
struct JumpTableEntry : public ZoneObject {
- inline JumpTableEntry(Address entry,
- Deoptimizer::BailoutType type,
+ inline JumpTableEntry(Address entry, const char* the_mnemonic,
+ const char* the_reason, Deoptimizer::BailoutType
type,
bool frame)
: label(),
address(entry),
+ mnemonic(the_mnemonic),
+ reason(the_reason),
bailout_type(type),
- needs_frame(frame) { }
+ needs_frame(frame) {}
Label label;
Address address;
+ const char* mnemonic;
+ const char* reason;
Deoptimizer::BailoutType bailout_type;
bool needs_frame;
};
=======================================
--- /trunk/src/factory.cc Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/factory.cc Fri Sep 19 18:04:45 2014 UTC
@@ -2328,9 +2328,12 @@
Handle<MapCache>(MapCache::cast(context->map_cache()));
Handle<Object> result = Handle<Object>(cache->Lookup(*keys), isolate());
if (result->IsMap()) return Handle<Map>::cast(result);
- // Create a new map and add it to the cache.
- Handle<Map> map = Map::Create(
- handle(context->object_function()), keys->length());
+ int length = keys->length();
+ // Create a new map and add it to the cache. Reuse the initial map of the
+ // Object function if the literal has no predeclared properties.
+ Handle<Map> map = length == 0
+ ? handle(context->object_function()->initial_map())
+ : Map::Create(isolate(), length);
AddToMapCache(context, keys, map);
return map;
}
=======================================
--- /trunk/src/flag-definitions.h Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/flag-definitions.h Fri Sep 19 18:04:45 2014 UTC
@@ -10,6 +10,12 @@
// which can be included multiple times in different modes. It expects to
have
// a mode defined before it's included. The modes are FLAG_MODE_... below:
+#define DEFINE_IMPLICATION(whenflag, thenflag) \
+ DEFINE_VALUE_IMPLICATION(whenflag, thenflag, true)
+
+#define DEFINE_NEG_IMPLICATION(whenflag, thenflag) \
+ DEFINE_VALUE_IMPLICATION(whenflag, thenflag, false)
+
// We want to declare the names of the variables for the header file.
Normally
// this will just be an extern declaration, but for a readonly flag we let
the
// compiler make better optimizations by giving it the value.
@@ -45,11 +51,8 @@
// We produce the code to set flags when it is implied by another flag.
#elif defined(FLAG_MODE_DEFINE_IMPLICATIONS)
-#define DEFINE_IMPLICATION(whenflag, thenflag) \
- if (FLAG_##whenflag) FLAG_##thenflag = true;
-
-#define DEFINE_NEG_IMPLICATION(whenflag, thenflag) \
- if (FLAG_##whenflag) FLAG_##thenflag = false;
+#define DEFINE_VALUE_IMPLICATION(whenflag, thenflag, value) \
+ if (FLAG_##whenflag) FLAG_##thenflag = value;
#else
#error No mode supplied when including flags.defs
@@ -68,14 +71,10 @@
#define FLAG_ALIAS(ftype, ctype, alias, nam)
#endif
-#ifndef DEFINE_IMPLICATION
-#define DEFINE_IMPLICATION(whenflag, thenflag)
+#ifndef DEFINE_VALUE_IMPLICATION
+#define DEFINE_VALUE_IMPLICATION(whenflag, thenflag, value)
#endif
-#ifndef DEFINE_NEG_IMPLICATION
-#define DEFINE_NEG_IMPLICATION(whenflag, thenflag)
-#endif
-
#define COMMA ,
#ifdef FLAG_MODE_DECLARE
@@ -162,6 +161,7 @@
DEFINE_BOOL(harmony_classes, false, "enable harmony classes")
DEFINE_BOOL(harmony_object_literals, false,
"enable harmony object literal extensions")
+DEFINE_BOOL(harmony_regexps, false, "enable regexp-related harmony
features")
DEFINE_BOOL(harmony, false, "enable all harmony features (except proxies)")
DEFINE_IMPLICATION(harmony, harmony_scoping)
@@ -174,6 +174,7 @@
DEFINE_IMPLICATION(harmony, harmony_arrow_functions)
DEFINE_IMPLICATION(harmony, harmony_classes)
DEFINE_IMPLICATION(harmony, harmony_object_literals)
+DEFINE_IMPLICATION(harmony, harmony_regexps)
DEFINE_IMPLICATION(harmony_modules, harmony_scoping)
DEFINE_IMPLICATION(harmony_classes, harmony_scoping)
DEFINE_IMPLICATION(harmony_classes, harmony_object_literals)
@@ -212,6 +213,8 @@
"Enables optimizations which favor memory size over execution "
"speed.")
+DEFINE_VALUE_IMPLICATION(optimize_for_size, max_semi_space_size, 1)
+
// Flags for data representation optimizations
DEFINE_BOOL(unbox_double_arrays, true, "automatically unbox arrays of
doubles")
DEFINE_BOOL(string_slices, true, "use string slices")
@@ -330,6 +333,7 @@
DEFINE_BOOL(trace_turbo, false, "trace generated TurboFan IR")
DEFINE_BOOL(trace_turbo_types, true, "trace generated TurboFan types")
DEFINE_BOOL(trace_turbo_scheduler, false, "trace generated TurboFan
scheduler")
+DEFINE_BOOL(turbo_asm, false, "enable TurboFan for asm.js code")
DEFINE_BOOL(turbo_verify, false, "verify TurboFan graphs at each phase")
DEFINE_BOOL(turbo_stats, false, "print TurboFan statistics")
#if V8_TURBOFAN_BACKEND
@@ -468,9 +472,7 @@
DEFINE_BOOL(hard_abort, true, "abort by crashing")
// execution.cc
-// Slightly less than 1MB, since Windows' default stack size for
-// the main execution thread is 1MB for both 32 and 64-bit.
-DEFINE_INT(stack_size, 984,
+DEFINE_INT(stack_size, V8_DEFAULT_STACK_SIZE_KB,
"default size of stack region v8 is allowed to use (in kBytes)")
// frames.cc
@@ -927,6 +929,7 @@
#undef DEFINE_ARGS
#undef DEFINE_IMPLICATION
#undef DEFINE_NEG_IMPLICATION
+#undef DEFINE_VALUE_IMPLICATION
#undef DEFINE_ALIAS_BOOL
#undef DEFINE_ALIAS_INT
#undef DEFINE_ALIAS_STRING
=======================================
--- /trunk/src/full-codegen.cc Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/full-codegen.cc Fri Sep 19 18:04:45 2014 UTC
@@ -833,8 +833,7 @@
void FullCodeGenerator::VisitSuperReference(SuperReference* super) {
- DCHECK(FLAG_harmony_classes);
- UNIMPLEMENTED();
+ __ CallRuntime(Runtime::kThrowUnsupportedSuperError, 0);
}
=======================================
--- /trunk/src/full-codegen.h Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/full-codegen.h Fri Sep 19 18:04:45 2014 UTC
@@ -477,6 +477,7 @@
// Platform-specific code sequences for calls
void EmitCall(Call* expr, CallICState::CallType = CallICState::FUNCTION);
void EmitCallWithLoadIC(Call* expr);
+ void EmitSuperCallWithLoadIC(Call* expr);
void EmitKeyedCallWithLoadIC(Call* expr, Expression* key);
// Platform-specific code for inline runtime calls.
@@ -520,6 +521,8 @@
// The receiver is left on the stack by the IC.
void EmitNamedPropertyLoad(Property* expr);
+ void EmitNamedSuperPropertyLoad(Property* expr);
+
// Load a value from a keyed property.
// The receiver and the key is left on the stack by the IC.
void EmitKeyedPropertyLoad(Property* expr);
@@ -560,6 +563,8 @@
// accumulator.
void EmitKeyedPropertyAssignment(Assignment* expr);
+ void EmitLoadHomeObject(SuperReference* expr);
+
void CallIC(Handle<Code> code,
TypeFeedbackId id = TypeFeedbackId::None());
=======================================
--- /trunk/src/globals.h Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/globals.h Fri Sep 19 18:04:45 2014 UTC
@@ -68,6 +68,18 @@
// Determine whether the architecture uses an out-of-line constant pool.
#define V8_OOL_CONSTANT_POOL 0
+#ifdef V8_TARGET_ARCH_ARM
+// Set stack limit lower for ARM than for other architectures because
+// stack allocating MacroAssembler takes 120K bytes.
+// See issue crbug.com/405338
+#define V8_DEFAULT_STACK_SIZE_KB 864
+#else
+// Slightly less than 1MB, since Windows' default stack size for
+// the main execution thread is 1MB for both 32 and 64-bit.
+#define V8_DEFAULT_STACK_SIZE_KB 984
+#endif
+
+
// Support for alternative bool type. This is only enabled if the code is
// compiled with USE_MYBOOL defined. This catches some nasty type bugs.
// For instance, 'bool b = "false";' results in b == true! This is a hidden
=======================================
--- /trunk/src/heap/gc-idle-time-handler.cc Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/heap/gc-idle-time-handler.cc Fri Sep 19 18:04:45 2014 UTC
@@ -110,7 +110,8 @@
// that this currently may trigger a full garbage collection.
GCIdleTimeAction GCIdleTimeHandler::Compute(size_t idle_time_in_ms,
HeapState heap_state) {
- if (ScavangeMayHappenSoon(
+ if (idle_time_in_ms <= kMaxFrameRenderingIdleTime &&
+ ScavangeMayHappenSoon(
heap_state.available_new_space_memory,
heap_state.new_space_allocation_throughput_in_bytes_per_ms) &&
idle_time_in_ms >=
=======================================
--- /trunk/src/heap/heap-inl.h Thu Sep 11 00:05:22 2014 UTC
+++ /trunk/src/heap/heap-inl.h Fri Sep 19 18:04:45 2014 UTC
@@ -15,6 +15,7 @@
#include "src/heap-profiler.h"
#include "src/isolate.h"
#include "src/list-inl.h"
+#include "src/msan.h"
#include "src/objects.h"
namespace v8 {
@@ -495,7 +496,7 @@
AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
// Check if there is potentially a memento behind the object. If
- // the last word of the momento is on another page we return
+ // the last word of the memento is on another page we return
// immediately.
Address object_address = object->address();
Address memento_address = object_address + object->Size();
@@ -505,7 +506,12 @@
}
HeapObject* candidate = HeapObject::FromAddress(memento_address);
- if (candidate->map() != allocation_memento_map()) return NULL;
+ Map* candidate_map = candidate->map();
+ // This fast check may peek at an uninitialized word. However, the slow check
+ // below (memento_address == top) ensures that this is safe. Mark the word as
+ // initialized to silence MemorySanitizer warnings.
+ MSAN_MEMORY_IS_INITIALIZED(&candidate_map, sizeof(candidate_map));
+ if (candidate_map != allocation_memento_map()) return NULL;
// Either the object is the last object in the new space, or there is another
// object of at least word size (the header map word) following it, so
=======================================
--- /trunk/src/heap/heap.h Thu Sep 11 00:05:22 2014 UTC
+++ /trunk/src/heap/heap.h Fri Sep 19 18:04:45 2014 UTC
@@ -287,6 +287,8 @@
V(global_string, "global") \
V(ignore_case_string, "ignoreCase") \
V(multiline_string, "multiline") \
+ V(sticky_string, "sticky") \
+ V(harmony_regexps_string, "harmony_regexps") \
V(input_string, "input") \
V(index_string, "index") \
V(last_index_string, "lastIndex") \
=======================================
--- /trunk/src/hydrogen.cc Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/hydrogen.cc Fri Sep 19 18:04:45 2014 UTC
@@ -7138,6 +7138,10 @@
uint32_t array_index;
if (constant->IsString() &&
!Handle<String>::cast(constant)->AsArrayIndex(&array_index)) {
+ if (!constant->IsUniqueName()) {
+ constant = isolate()->factory()->InternalizeString(
+ Handle<String>::cast(constant));
+ }
HInstruction* instr =
BuildNamedAccess(access_type, expr->id(), return_id, expr, obj,
Handle<String>::cast(constant), val, false);
@@ -11001,7 +11005,8 @@
}
// Copy in-object properties.
- if (boilerplate_object->map()->NumberOfFields() != 0) {
+ if (boilerplate_object->map()->NumberOfFields() != 0 ||
+ boilerplate_object->map()->unused_property_fields() > 0) {
BuildEmitInObjectProperties(boilerplate_object, object, site_context,
pretenure_flag);
}
@@ -11215,7 +11220,10 @@
void HOptimizedGraphBuilder::VisitSuperReference(SuperReference* expr) {
- UNREACHABLE();
+ DCHECK(!HasStackOverflow());
+ DCHECK(current_block() != NULL);
+ DCHECK(current_block()->HasPredecessor());
+ return Bailout(kSuperReference);
}
=======================================
--- /trunk/src/hydrogen.h Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/hydrogen.h Fri Sep 19 18:04:45 2014 UTC
@@ -1727,27 +1727,6 @@
Direction direction_;
bool finished_;
};
-
- template <class A, class P1>
- void DeoptimizeIf(P1 p1, char* const reason) {
- IfBuilder builder(this);
- builder.If<A>(p1);
- builder.ThenDeopt(reason);
- }
-
- template <class A, class P1, class P2>
- void DeoptimizeIf(P1 p1, P2 p2, const char* reason) {
- IfBuilder builder(this);
- builder.If<A>(p1, p2);
- builder.ThenDeopt(reason);
- }
-
- template <class A, class P1, class P2, class P3>
- void DeoptimizeIf(P1 p1, P2 p2, P3 p3, const char* reason) {
- IfBuilder builder(this);
- builder.If<A>(p1, p2, p3);
- builder.ThenDeopt(reason);
- }
HValue* BuildNewElementsCapacity(HValue* old_capacity);
=======================================
--- /trunk/src/ia32/assembler-ia32.cc Wed Sep 3 08:32:14 2014 UTC
+++ /trunk/src/ia32/assembler-ia32.cc Fri Sep 19 18:04:45 2014 UTC
@@ -2012,6 +2012,15 @@
EMIT(0x5C);
emit_sse_operand(dst, src);
}
+
+
+void Assembler::subsd(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ EMIT(0xF2);
+ EMIT(0x0F);
+ EMIT(0x5C);
+ emit_sse_operand(dst, src);
+}
void Assembler::divsd(XMMRegister dst, XMMRegister src) {
=======================================
--- /trunk/src/ia32/assembler-ia32.h Thu Aug 7 08:39:21 2014 UTC
+++ /trunk/src/ia32/assembler-ia32.h Fri Sep 19 18:04:45 2014 UTC
@@ -956,6 +956,7 @@
void addsd(XMMRegister dst, XMMRegister src);
void addsd(XMMRegister dst, const Operand& src);
void subsd(XMMRegister dst, XMMRegister src);
+ void subsd(XMMRegister dst, const Operand& src);
void mulsd(XMMRegister dst, XMMRegister src);
void mulsd(XMMRegister dst, const Operand& src);
void divsd(XMMRegister dst, XMMRegister src);
=======================================
--- /trunk/src/ia32/full-codegen-ia32.cc Fri Sep 19 00:05:16 2014 UTC
+++ /trunk/src/ia32/full-codegen-ia32.cc Fri Sep 19 18:04:45 2014 UTC
@@ -1276,6 +1276,25 @@
Comment cmnt(masm_, "[ VariableProxy");
EmitVariableLoad(expr);
}
+
+
+void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
+ Comment cnmt(masm_, "[ SuperReference ");
+
+ __ mov(LoadDescriptor::ReceiverRegister(),
+ Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+
+ Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
+ __ mov(LoadDescriptor::NameRegister(), home_object_symbol);
+
+ CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
+
+ __ cmp(eax, isolate()->factory()->undefined_value());
+ Label done;
+ __ j(not_equal, &done);
+ __ CallRuntime(Runtime::kThrowNonMethodError, 0);
+ __ bind(&done);
+}
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
@@ -2229,6 +2248,21 @@
CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
}
}
+
+
+void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
+ SetSourcePosition(prop->position());
+ Literal* key = prop->key()->AsLiteral();
+ DCHECK(!key->value()->IsSmi());
+ DCHECK(prop->IsSuperAccess());
+
+ SuperReference* super_ref = prop->obj()->AsSuperReference();
+ EmitLoadHomeObject(super_ref);
+ __ push(eax);
+ VisitForStackValue(super_ref->this_var());
+ __ push(Immediate(key->value()));
+ __ CallRuntime(Runtime::kLoadFromSuper, 3);
+}
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
@@ -2520,9 +2554,13 @@
Expression* key = expr->key();
if (key->IsPropertyName()) {
- VisitForAccumulatorValue(expr->obj());
- __ Move(LoadDescriptor::ReceiverRegister(), result_register());
- EmitNamedPropertyLoad(expr);
+ if (!expr->IsSuperAccess()) {
+ VisitForAccumulatorValue(expr->obj());
+ __ Move(LoadDescriptor::ReceiverRegister(), result_register());
+ EmitNamedPropertyLoad(expr);
+ } else {
+ EmitNamedSuperPropertyLoad(expr);
+ }
PrepareForBailoutForId(expr->LoadId(), TOS_REG);
context()->Plug(eax);
} else {
@@ -2561,6 +2599,7 @@
} else {
// Load the function from the receiver.
DCHECK(callee->IsProperty());
+ DCHECK(!callee->AsProperty()->IsSuperAccess());
__ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
EmitNamedPropertyLoad(callee->AsProperty());
PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
@@ -2572,6 +2611,42 @@
EmitCall(expr, call_type);
}
+
+void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
+ Expression* callee = expr->expression();
+ DCHECK(callee->IsProperty());
+ Property* prop = callee->AsProperty();
+ DCHECK(prop->IsSuperAccess());
+
+ SetSourcePosition(prop->position());
+ Literal* key = prop->key()->AsLiteral();
+ DCHECK(!key->value()->IsSmi());
+ // Load the function from the receiver.
+ SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
+ EmitLoadHomeObject(super_ref);
+ __ push(eax);
+ VisitForAccumulatorValue(super_ref->this_var());
+ __ push(eax);
+ __ push(Operand(esp, kPointerSize));
+ __ push(eax);
+ __ push(Immediate(key->value()));
+ // Stack here:
+ // - home_object
+ // - this (receiver)
+ // - home_object <-- LoadFromSuper will pop here and below.
+ // - this (receiver)
+ // - key
+ __ CallRuntime(Runtime::kLoadFromSuper, 3);
+
+ // Replace home_object with target function.
+ __ mov(Operand(esp, kPointerSize), eax);
+
+ // Stack here:
+ // - target function
+ // - this (receiver)
+ EmitCall(expr, CallICState::METHOD);
+}
+
// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
@@ -2733,15 +2808,21 @@
} else if (call_type == Call::PROPERTY_CALL) {
Property* property = callee->AsProperty();
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(property->obj());
- }
- if (property->key()->IsPropertyName()) {
- EmitCallWithLoadIC(expr);
+ bool is_named_call = property->key()->IsPropertyName();
+ // super.x() is handled in EmitCallWithLoadIC.
+ if (property->IsSuperAccess() && is_named_call) {
+ EmitSuperCallWithLoadIC(expr);
} else {
- EmitKeyedCallWithLoadIC(expr, property->key());
+ {
+ PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(property->obj());
+ }
+ if (is_named_call) {
+ EmitCallWithLoadIC(expr);
+ } else {
+ EmitKeyedCallWithLoadIC(expr, property->key());
+ }
}
-
} else {
DCHECK(call_type == Call::OTHER_CALL);
// Call to an arbitrary expression not handled specially above.
=======================================
***Additional files exist in this changeset.***
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/d/optout.