Revision: 24142
Author: [email protected]
Date: Tue Sep 23 10:40:53 2014 UTC
Log: Version 3.29.83 (based on bleeding_edge revision r24139)
Performance and stability improvements on all platforms.
https://code.google.com/p/v8/source/detail?r=24142
Modified:
/trunk/ChangeLog
/trunk/PRESUBMIT.py
/trunk/src/arm/code-stubs-arm.cc
/trunk/src/arm/macro-assembler-arm.cc
/trunk/src/arm/macro-assembler-arm.h
/trunk/src/arm64/code-stubs-arm64.cc
/trunk/src/arm64/macro-assembler-arm64.cc
/trunk/src/arm64/macro-assembler-arm64.h
/trunk/src/base/macros.h
/trunk/src/compiler/arm/code-generator-arm.cc
/trunk/src/compiler/arm64/code-generator-arm64.cc
/trunk/src/compiler/code-generator.cc
/trunk/src/compiler/code-generator.h
/trunk/src/compiler/ia32/code-generator-ia32.cc
/trunk/src/compiler/js-builtin-reducer.cc
/trunk/src/compiler/js-builtin-reducer.h
/trunk/src/compiler/x64/code-generator-x64.cc
/trunk/src/ia32/code-stubs-ia32.cc
/trunk/src/ia32/lithium-codegen-ia32.cc
/trunk/src/ia32/macro-assembler-ia32.cc
/trunk/src/ia32/macro-assembler-ia32.h
/trunk/src/ic/arm/ic-compiler-arm.cc
/trunk/src/ic/arm64/ic-compiler-arm64.cc
/trunk/src/ic/ia32/ic-compiler-ia32.cc
/trunk/src/ic/ic.cc
/trunk/src/ic/ic.h
/trunk/src/ic/mips/ic-compiler-mips.cc
/trunk/src/ic/mips64/ic-compiler-mips64.cc
/trunk/src/ic/x64/ic-compiler-x64.cc
/trunk/src/ic/x87/ic-compiler-x87.cc
/trunk/src/mips/code-stubs-mips.cc
/trunk/src/mips/macro-assembler-mips.cc
/trunk/src/mips/macro-assembler-mips.h
/trunk/src/mips64/code-stubs-mips64.cc
/trunk/src/mips64/macro-assembler-mips64.cc
/trunk/src/mips64/macro-assembler-mips64.h
/trunk/src/version.cc
/trunk/src/x64/code-stubs-x64.cc
/trunk/src/x64/macro-assembler-x64.cc
/trunk/src/x64/macro-assembler-x64.h
/trunk/src/x87/code-stubs-x87.cc
/trunk/src/x87/macro-assembler-x87.cc
/trunk/src/x87/macro-assembler-x87.h
/trunk/test/cctest/compiler/test-js-typed-lowering.cc
/trunk/test/mjsunit/regress/string-set-char-deopt.js
=======================================
--- /trunk/ChangeLog Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/ChangeLog Tue Sep 23 10:40:53 2014 UTC
@@ -1,3 +1,8 @@
+2014-09-23: Version 3.29.83
+
+ Performance and stability improvements on all platforms.
+
+
2014-09-23: Version 3.29.82
Fix escaped index JSON parsing (Chromium issue 416449).
=======================================
--- /trunk/PRESUBMIT.py Tue Sep 16 07:50:38 2014 UTC
+++ /trunk/PRESUBMIT.py Tue Sep 23 10:40:53 2014 UTC
@@ -34,6 +34,32 @@
import sys
+_EXCLUDED_PATHS = (
+ r"^test[\\\/].*",
+ r"^testing[\\\/].*",
+ r"^third_party[\\\/].*",
+ r"^tools[\\\/].*",
+)
+
+
+# Regular expression that matches code only used for test binaries
+# (best effort).
+_TEST_CODE_EXCLUDED_PATHS = (
+ r'.+-unittest\.cc',
+ # Has a method VisitForTest().
+ r'src[\\\/]compiler[\\\/]ast-graph-builder\.cc',
+ # Test extension.
+ r'src[\\\/]extensions[\\\/]gc-extension\.cc',
+)
+
+
+_TEST_ONLY_WARNING = (
+ 'You might be calling functions intended only for testing from\n'
+ 'production code. It is OK to ignore this warning if you know what\n'
+ 'you are doing, as the heuristics used to detect the situation are\n'
+ 'not perfect. The commit queue will not block on this warning.')
+
+
def _V8PresubmitChecks(input_api, output_api):
"""Runs the V8 presubmit checks."""
import sys
@@ -113,6 +139,49 @@
return results
+def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
+ """Attempts to prevent use of functions intended only for testing in
+ non-testing code. For now this is just a best-effort implementation
+ that ignores header files and may have some false positives. A
+ better implementation would probably need a proper C++ parser.
+ """
+ # We only scan .cc files, as the declaration of for-testing functions in
+  # header files are hard to distinguish from calls to such functions without a
+ # proper C++ parser.
+ file_inclusion_pattern = r'.+\.cc'
+
+ base_function_pattern = r'[ :]test::[^\s]+|ForTest(ing)?|for_test(ing)?'
+  inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
+  comment_pattern = input_api.re.compile(r'//.*(%s)' % base_function_pattern)
+ exclusion_pattern = input_api.re.compile(
+ r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
+ base_function_pattern, base_function_pattern))
+
+ def FilterFile(affected_file):
+ black_list = (_EXCLUDED_PATHS +
+ _TEST_CODE_EXCLUDED_PATHS +
+ input_api.DEFAULT_BLACK_LIST)
+ return input_api.FilterSourceFile(
+ affected_file,
+ white_list=(file_inclusion_pattern, ),
+ black_list=black_list)
+
+ problems = []
+ for f in input_api.AffectedSourceFiles(FilterFile):
+ local_path = f.LocalPath()
+ for line_number, line in f.ChangedContents():
+ if (inclusion_pattern.search(line) and
+ not comment_pattern.search(line) and
+ not exclusion_pattern.search(line)):
+ problems.append(
+ '%s:%d\n %s' % (local_path, line_number, line.strip()))
+
+ if problems:
+    return [output_api.PresubmitPromptOrNotify(_TEST_ONLY_WARNING, problems)]
+ else:
+ return []
+
+
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
results = []
@@ -122,6 +191,8 @@
input_api, output_api))
results.extend(_V8PresubmitChecks(input_api, output_api))
results.extend(_CheckUnwantedDependencies(input_api, output_api))
+ results.extend(
+ _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
return results
=======================================
--- /trunk/src/arm/code-stubs-arm.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/arm/code-stubs-arm.cc Tue Sep 23 10:40:53 2014 UTC
@@ -3465,8 +3465,8 @@
__ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
__ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(tmp1, &miss);
- __ JumpIfNotUniqueName(tmp2, &miss);
+ __ JumpIfNotUniqueNameInstanceType(tmp1, &miss);
+ __ JumpIfNotUniqueNameInstanceType(tmp2, &miss);
// Unique names are compared by identity.
__ cmp(left, right);
@@ -3698,7 +3698,7 @@
__ ldr(entity_name, FieldMemOperand(entity_name,
HeapObject::kMapOffset));
__ ldrb(entity_name,
FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(entity_name, miss);
+ __ JumpIfNotUniqueNameInstanceType(entity_name, miss);
__ bind(&good);
// Restore the properties.
@@ -3868,7 +3868,7 @@
__ ldr(entry_key, FieldMemOperand(entry_key,
HeapObject::kMapOffset));
__ ldrb(entry_key,
FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary);
+ __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary);
}
}
=======================================
--- /trunk/src/arm/macro-assembler-arm.cc Thu Sep 11 00:05:22 2014 UTC
+++ /trunk/src/arm/macro-assembler-arm.cc Tue Sep 23 10:40:53 2014 UTC
@@ -3199,8 +3199,8 @@
}
-void MacroAssembler::JumpIfNotUniqueName(Register reg,
- Label* not_unique_name) {
+void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
+                                                     Label* not_unique_name) {
STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
Label succeed;
tst(reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
=======================================
--- /trunk/src/arm/macro-assembler-arm.h Thu Sep 11 00:05:22 2014 UTC
+++ /trunk/src/arm/macro-assembler-arm.h Tue Sep 23 10:40:53 2014 UTC
@@ -1340,7 +1340,7 @@
void JumpIfInstanceTypeIsNotSequentialOneByte(Register type, Register
scratch,
Label* failure);
- void JumpIfNotUniqueName(Register reg, Label* not_unique_name);
+  void JumpIfNotUniqueNameInstanceType(Register reg, Label* not_unique_name);
void EmitSeqStringSetCharCheck(Register string,
Register index,
=======================================
--- /trunk/src/arm64/code-stubs-arm64.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/arm64/code-stubs-arm64.cc Tue Sep 23 10:40:53 2014 UTC
@@ -3370,8 +3370,8 @@
// To avoid a miss, each instance type should be either SYMBOL_TYPE or it
// should have kInternalizedTag set.
- __ JumpIfNotUniqueName(lhs_instance_type, &miss);
- __ JumpIfNotUniqueName(rhs_instance_type, &miss);
+ __ JumpIfNotUniqueNameInstanceType(lhs_instance_type, &miss);
+ __ JumpIfNotUniqueNameInstanceType(rhs_instance_type, &miss);
// Unique names are compared by identity.
STATIC_ASSERT(EQUAL == 0);
@@ -4488,7 +4488,7 @@
__ Ldr(entity_name, FieldMemOperand(entity_name,
HeapObject::kMapOffset));
__ Ldrb(entity_name,
FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(entity_name, miss);
+ __ JumpIfNotUniqueNameInstanceType(entity_name, miss);
__ Bind(&good);
}
@@ -4575,7 +4575,7 @@
// Check if the entry name is not a unique name.
__ Ldr(entry_key, FieldMemOperand(entry_key,
HeapObject::kMapOffset));
__ Ldrb(entry_key, FieldMemOperand(entry_key,
Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary);
+ __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary);
}
}
=======================================
--- /trunk/src/arm64/macro-assembler-arm64.cc Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/arm64/macro-assembler-arm64.cc Tue Sep 23 10:40:53 2014 UTC
@@ -2768,8 +2768,8 @@
}
-void MacroAssembler::JumpIfNotUniqueName(Register type,
- Label* not_unique_name) {
+void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register type,
+                                                     Label* not_unique_name) {
STATIC_ASSERT((kInternalizedTag == 0) && (kStringTag == 0));
// if ((type is string && type is internalized) || type == SYMBOL_TYPE) {
// continue
=======================================
--- /trunk/src/arm64/macro-assembler-arm64.h Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/arm64/macro-assembler-arm64.h Tue Sep 23 10:40:53 2014 UTC
@@ -1074,7 +1074,7 @@
Register first_object_instance_type, Register
second_object_instance_type,
Register scratch1, Register scratch2, Label* failure);
- void JumpIfNotUniqueName(Register type, Label* not_unique_name);
+  void JumpIfNotUniqueNameInstanceType(Register type, Label* not_unique_name);
// ---- Calling / Jumping helpers ----
=======================================
--- /trunk/src/base/macros.h Tue Sep 9 00:05:04 2014 UTC
+++ /trunk/src/base/macros.h Tue Sep 23 10:40:53 2014 UTC
@@ -230,7 +230,7 @@
// WARNING: if Dest or Source is a non-POD type, the result of the memcpy
// is likely to surprise you.
template <class Dest, class Source>
-inline Dest bit_cast(const Source& source) {
+V8_INLINE Dest bit_cast(Source const& source) {
COMPILE_ASSERT(sizeof(Dest) == sizeof(Source), VerifySizesAreEqual);
Dest dest;
=======================================
--- /trunk/src/compiler/arm/code-generator-arm.cc Tue Sep 23 08:38:19 2014
UTC
+++ /trunk/src/compiler/arm/code-generator-arm.cc Tue Sep 23 10:40:53 2014
UTC
@@ -137,6 +137,7 @@
switch (ArchOpcodeField::decode(instr->opcode())) {
case kArchCallCodeObject: {
+ EnsureSpaceForLazyDeopt();
if (instr->InputAt(0)->IsImmediate()) {
__ Call(Handle<Code>::cast(i.InputHeapObject(0)),
RelocInfo::CODE_TARGET);
@@ -150,6 +151,7 @@
break;
}
case kArchCallJSFunction: {
+ EnsureSpaceForLazyDeopt();
Register func = i.InputRegister(0);
if (FLAG_debug_code) {
// Check the function's context matches the context argument.
@@ -842,6 +844,27 @@
void CodeGenerator::AddNopForSmiCodeInlining() {
// On 32-bit ARM we do not insert nops for inlined Smi code.
}
+
+
+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+ int space_needed = Deoptimizer::patch_size();
+ if (!linkage()->info()->IsStub()) {
+ // Ensure that we have enough space after the previous lazy-bailout
+ // instruction for patching the code here.
+ int current_pc = masm()->pc_offset();
+ if (current_pc < last_lazy_deopt_pc_ + space_needed) {
+ // Block literal pool emission for duration of padding.
+      v8::internal::Assembler::BlockConstPoolScope block_const_pool(masm());
+ int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+ DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize);
+ while (padding_size > 0) {
+ __ nop();
+ padding_size -= v8::internal::Assembler::kInstrSize;
+ }
+ }
+ }
+ MarkLazyDeoptSite();
+}
#undef __
=======================================
--- /trunk/src/compiler/arm64/code-generator-arm64.cc Tue Sep 23 08:38:19
2014 UTC
+++ /trunk/src/compiler/arm64/code-generator-arm64.cc Tue Sep 23 10:40:53
2014 UTC
@@ -132,6 +132,7 @@
InstructionCode opcode = instr->opcode();
switch (ArchOpcodeField::decode(opcode)) {
case kArchCallCodeObject: {
+ EnsureSpaceForLazyDeopt();
if (instr->InputAt(0)->IsImmediate()) {
__ Call(Handle<Code>::cast(i.InputHeapObject(0)),
RelocInfo::CODE_TARGET);
@@ -144,6 +145,7 @@
break;
}
case kArchCallJSFunction: {
+ EnsureSpaceForLazyDeopt();
Register func = i.InputRegister(0);
if (FLAG_debug_code) {
// Check the function's context matches the context argument.
@@ -843,6 +845,29 @@
void CodeGenerator::AddNopForSmiCodeInlining() { __ movz(xzr, 0); }
+
+
+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+ int space_needed = Deoptimizer::patch_size();
+ if (!linkage()->info()->IsStub()) {
+ // Ensure that we have enough space after the previous lazy-bailout
+ // instruction for patching the code here.
+ intptr_t current_pc = masm()->pc_offset();
+
+ if (current_pc < (last_lazy_deopt_pc_ + space_needed)) {
+      intptr_t padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+ DCHECK((padding_size % kInstructionSize) == 0);
+ InstructionAccurateScope instruction_accurate(
+ masm(), padding_size / kInstructionSize);
+
+ while (padding_size > 0) {
+ __ nop();
+ padding_size -= kInstructionSize;
+ }
+ }
+ }
+ MarkLazyDeoptSite();
+}
#undef __
=======================================
--- /trunk/src/compiler/code-generator.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/compiler/code-generator.cc Tue Sep 23 10:40:53 2014 UTC
@@ -21,7 +21,8 @@
safepoints_(code->zone()),
deoptimization_states_(code->zone()),
deoptimization_literals_(code->zone()),
- translations_(code->zone()) {}
+ translations_(code->zone()),
+ last_lazy_deopt_pc_(0) {}
Handle<Code> CodeGenerator::GenerateCode() {
@@ -242,6 +243,7 @@
}
if (needs_frame_state) {
+ MarkLazyDeoptSite();
// If the frame state is present, it starts at argument 1
// (just after the code address).
InstructionOperandConverter converter(this, instr);
@@ -387,8 +389,7 @@
isolate()->factory()->NewNumberFromInt(constant.ToInt32());
break;
case Constant::kFloat64:
- constant_object =
- isolate()->factory()->NewHeapNumber(constant.ToFloat64());
+      constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
break;
case Constant::kHeapObject:
constant_object = constant.ToHeapObject();
@@ -402,6 +403,11 @@
UNREACHABLE();
}
}
+
+
+void CodeGenerator::MarkLazyDeoptSite() {
+ last_lazy_deopt_pc_ = masm()->pc_offset();
+}
#if !V8_TURBOFAN_BACKEND
=======================================
--- /trunk/src/compiler/code-generator.h Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/compiler/code-generator.h Tue Sep 23 10:40:53 2014 UTC
@@ -98,8 +98,10 @@
void AddTranslationForOperand(Translation* translation, Instruction*
instr,
InstructionOperand* op);
void AddNopForSmiCodeInlining();
+ void EnsureSpaceForLazyDeopt();
+ void MarkLazyDeoptSite();
+
//
===========================================================================
-
struct DeoptimizationState : ZoneObject {
public:
BailoutId bailout_id() const { return bailout_id_; }
@@ -126,6 +128,7 @@
ZoneDeque<DeoptimizationState*> deoptimization_states_;
ZoneDeque<Handle<Object> > deoptimization_literals_;
TranslationBuffer translations_;
+ int last_lazy_deopt_pc_;
};
} // namespace compiler
=======================================
--- /trunk/src/compiler/ia32/code-generator-ia32.cc Tue Sep 23 08:38:19
2014 UTC
+++ /trunk/src/compiler/ia32/code-generator-ia32.cc Tue Sep 23 10:40:53
2014 UTC
@@ -112,6 +112,7 @@
switch (ArchOpcodeField::decode(instr->opcode())) {
case kArchCallCodeObject: {
+ EnsureSpaceForLazyDeopt();
if (HasImmediateInput(instr, 0)) {
Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
__ call(code, RelocInfo::CODE_TARGET);
@@ -123,6 +124,7 @@
break;
}
case kArchCallJSFunction: {
+ EnsureSpaceForLazyDeopt();
Register func = i.InputRegister(0);
if (FLAG_debug_code) {
// Check the function's context matches the context argument.
@@ -931,6 +933,21 @@
void CodeGenerator::AddNopForSmiCodeInlining() { __ nop(); }
+
+
+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+ int space_needed = Deoptimizer::patch_size();
+ if (!linkage()->info()->IsStub()) {
+ // Ensure that we have enough space after the previous lazy-bailout
+ // instruction for patching the code here.
+ int current_pc = masm()->pc_offset();
+ if (current_pc < last_lazy_deopt_pc_ + space_needed) {
+ int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+ __ Nop(padding_size);
+ }
+ }
+ MarkLazyDeoptSite();
+}
#undef __
=======================================
--- /trunk/src/compiler/js-builtin-reducer.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/compiler/js-builtin-reducer.cc Tue Sep 23 10:40:53 2014 UTC
@@ -47,24 +47,36 @@
Handle<JSFunction> function =
Handle<JSFunction>::cast(m.Value().handle());
return function->shared()->builtin_function_id();
}
+
+ // Determines whether the call takes zero inputs.
+ bool InputsMatchZero() { return GetJSCallArity() == 0; }
// Determines whether the call takes one input of the given type.
- bool InputsMatch(Type* t1) {
+ bool InputsMatchOne(Type* t1) {
return GetJSCallArity() == 1 &&
NodeProperties::GetBounds(GetJSCallInput(0)).upper->Is(t1);
}
// Determines whether the call takes two inputs of the given types.
- bool InputsMatch(Type* t1, Type* t2) {
+ bool InputsMatchTwo(Type* t1, Type* t2) {
return GetJSCallArity() == 2 &&
NodeProperties::GetBounds(GetJSCallInput(0)).upper->Is(t1) &&
NodeProperties::GetBounds(GetJSCallInput(1)).upper->Is(t2);
}
+
+ // Determines whether the call takes inputs all of the given type.
+ bool InputsMatchAll(Type* t) {
+ for (int i = 0; i < GetJSCallArity(); i++) {
+ if (!NodeProperties::GetBounds(GetJSCallInput(i)).upper->Is(t)) {
+ return false;
+ }
+ }
+ return true;
+ }
Node* left() { return GetJSCallInput(0); }
Node* right() { return GetJSCallInput(1); }
- protected:
int GetJSCallArity() {
DCHECK_EQ(IrOpcode::kJSCallFunction, node_->opcode());
// Skip first (i.e. callee) and second (i.e. receiver) operand.
@@ -81,12 +93,44 @@
private:
Node* node_;
};
+
+
+// ECMA-262, section 15.8.2.11.
+Reduction JSBuiltinReducer::ReduceMathMax(Node* node) {
+ JSCallReduction r(node);
+ if (r.InputsMatchZero()) {
+ // Math.max() -> -Infinity
+ return Replace(jsgraph()->Constant(-V8_INFINITY));
+ }
+ if (r.InputsMatchOne(Type::Number())) {
+ // Math.max(a:number) -> a
+ return Replace(r.left());
+ }
+ if (r.InputsMatchAll(Type::Integral32())) {
+ // Math.max(a:int32, b:int32, ...)
+ Node* value = r.GetJSCallInput(0);
+ for (int i = 1; i < r.GetJSCallArity(); i++) {
+ Node* p = r.GetJSCallInput(i);
+ Node* control = graph()->start();
+      Node* tag = graph()->NewNode(simplified()->NumberLessThan(), value, p);
+
+ Node* branch = graph()->NewNode(common()->Branch(), tag, control);
+ Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
+ Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
+      Node* merge = graph()->NewNode(common()->Merge(2), if_true, if_false);
+
+      value = graph()->NewNode(common()->Phi(kMachNone, 2), p, value, merge);
+ }
+ return Replace(value);
+ }
+ return NoChange();
+}
// ES6 draft 08-24-14, section 20.2.2.19.
Reduction JSBuiltinReducer::ReduceMathImul(Node* node) {
JSCallReduction r(node);
- if (r.InputsMatch(Type::Integral32(), Type::Integral32())) {
+ if (r.InputsMatchTwo(Type::Integral32(), Type::Integral32())) {
// Math.imul(a:int32, b:int32) -> Int32Mul(a, b)
+    Node* value = graph()->NewNode(machine()->Int32Mul(), r.left(), r.right());
return Replace(value);
@@ -101,6 +145,8 @@
// Dispatch according to the BuiltinFunctionId if present.
if (!r.HasBuiltinFunctionId()) return NoChange();
switch (r.GetBuiltinFunctionId()) {
+ case kMathMax:
+ return ReplaceWithPureReduction(node, ReduceMathMax(node));
case kMathImul:
return ReplaceWithPureReduction(node, ReduceMathImul(node));
default:
=======================================
--- /trunk/src/compiler/js-builtin-reducer.h Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/compiler/js-builtin-reducer.h Tue Sep 23 10:40:53 2014 UTC
@@ -24,11 +24,13 @@
virtual Reduction Reduce(Node* node) OVERRIDE;
private:
- Graph* graph() { return jsgraph_->graph(); }
- CommonOperatorBuilder* common() { return jsgraph_->common(); }
- MachineOperatorBuilder* machine() { return jsgraph_->machine(); }
+ JSGraph* jsgraph() const { return jsgraph_; }
+ Graph* graph() const { return jsgraph_->graph(); }
+ CommonOperatorBuilder* common() const { return jsgraph_->common(); }
+ MachineOperatorBuilder* machine() const { return jsgraph_->machine(); }
SimplifiedOperatorBuilder* simplified() { return &simplified_; }
+ Reduction ReduceMathMax(Node* node);
Reduction ReduceMathImul(Node* node);
JSGraph* jsgraph_;
=======================================
--- /trunk/src/compiler/x64/code-generator-x64.cc Tue Sep 23 08:38:19 2014
UTC
+++ /trunk/src/compiler/x64/code-generator-x64.cc Tue Sep 23 10:40:53 2014
UTC
@@ -205,6 +205,7 @@
switch (ArchOpcodeField::decode(instr->opcode())) {
case kArchCallCodeObject: {
+ EnsureSpaceForLazyDeopt();
if (HasImmediateInput(instr, 0)) {
Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0));
__ Call(code, RelocInfo::CODE_TARGET);
@@ -217,6 +218,7 @@
break;
}
case kArchCallJSFunction: {
+ EnsureSpaceForLazyDeopt();
Register func = i.InputRegister(0);
if (FLAG_debug_code) {
// Check the function's context matches the context argument.
@@ -990,6 +992,21 @@
void CodeGenerator::AddNopForSmiCodeInlining() { __ nop(); }
+
+
+void CodeGenerator::EnsureSpaceForLazyDeopt() {
+ int space_needed = Deoptimizer::patch_size();
+ if (!linkage()->info()->IsStub()) {
+ // Ensure that we have enough space after the previous lazy-bailout
+ // instruction for patching the code here.
+ int current_pc = masm()->pc_offset();
+ if (current_pc < last_lazy_deopt_pc_ + space_needed) {
+ int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
+ __ Nop(padding_size);
+ }
+ }
+ MarkLazyDeoptSite();
+}
#undef __
=======================================
--- /trunk/src/ia32/code-stubs-ia32.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/ia32/code-stubs-ia32.cc Tue Sep 23 10:40:53 2014 UTC
@@ -3502,8 +3502,8 @@
__ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
__ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
- __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);
+ __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
+ __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
// Unique names are compared by identity.
Label done;
@@ -3728,8 +3728,8 @@
// Check if the entry name is not a unique name.
__ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
-  __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset),
-                         miss);
+ __ JumpIfNotUniqueNameInstanceType(
+ FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
__ bind(&good);
}
@@ -3863,8 +3863,9 @@
// Check if the entry name is not a unique name.
__ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
-    __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
-                           &maybe_in_dictionary);
+ __ JumpIfNotUniqueNameInstanceType(
+ FieldOperand(scratch, Map::kInstanceTypeOffset),
+ &maybe_in_dictionary);
}
}
=======================================
--- /trunk/src/ia32/lithium-codegen-ia32.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/ia32/lithium-codegen-ia32.cc Tue Sep 23 10:40:53 2014 UTC
@@ -1616,10 +1616,6 @@
switch (instr->op()) {
case Token::ROR:
__ ror_cl(ToRegister(left));
- if (instr->can_deopt()) {
- __ test(ToRegister(left), ToRegister(left));
- DeoptimizeIf(sign, instr);
- }
break;
case Token::SAR:
__ sar_cl(ToRegister(left));
=======================================
--- /trunk/src/ia32/macro-assembler-ia32.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/ia32/macro-assembler-ia32.cc Tue Sep 23 10:40:53 2014 UTC
@@ -2917,9 +2917,9 @@
}
-void MacroAssembler::JumpIfNotUniqueName(Operand operand,
- Label* not_unique_name,
- Label::Distance distance) {
+void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
+                                                     Label* not_unique_name,
+                                                     Label::Distance distance) {
STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
Label succeed;
test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
=======================================
--- /trunk/src/ia32/macro-assembler-ia32.h Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/ia32/macro-assembler-ia32.h Tue Sep 23 10:40:53 2014 UTC
@@ -915,13 +915,13 @@
Label* on_not_flat_one_byte_strings);
// Checks if the given register or operand is a unique name
- void JumpIfNotUniqueName(Register reg, Label* not_unique_name,
- Label::Distance distance = Label::kFar) {
- JumpIfNotUniqueName(Operand(reg), not_unique_name, distance);
+ void JumpIfNotUniqueNameInstanceType(Register reg, Label*
not_unique_name,
+ Label::Distance distance =
Label::kFar) {
+ JumpIfNotUniqueNameInstanceType(Operand(reg), not_unique_name,
distance);
}
- void JumpIfNotUniqueName(Operand operand, Label* not_unique_name,
- Label::Distance distance = Label::kFar);
+ void JumpIfNotUniqueNameInstanceType(Operand operand, Label*
not_unique_name,
+ Label::Distance distance =
Label::kFar);
void EmitSeqStringSetCharCheck(Register string,
Register index,
=======================================
--- /trunk/src/ic/arm/ic-compiler-arm.cc Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/ic/arm/ic-compiler-arm.cc Tue Sep 23 10:40:53 2014 UTC
@@ -44,7 +44,11 @@
// In case we are compiling an IC for dictionary loads and stores, just
// check whether the name is unique.
if (name.is_identical_to(isolate()->factory()->normal_ic_symbol())) {
- __ JumpIfNotUniqueName(this->name(), &miss);
+ Register tmp = scratch1();
+ __ JumpIfSmi(this->name(), &miss);
+ __ ldr(tmp, FieldMemOperand(this->name(), HeapObject::kMapOffset));
+ __ ldrb(tmp, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
+ __ JumpIfNotUniqueNameInstanceType(tmp, &miss);
} else {
__ cmp(this->name(), Operand(name));
__ b(ne, &miss);
=======================================
--- /trunk/src/ic/arm64/ic-compiler-arm64.cc Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/ic/arm64/ic-compiler-arm64.cc Tue Sep 23 10:40:53 2014 UTC
@@ -45,7 +45,11 @@
// In case we are compiling an IC for dictionary loads and stores, just
// check whether the name is unique.
if (name.is_identical_to(isolate()->factory()->normal_ic_symbol())) {
- __ JumpIfNotUniqueName(this->name(), &miss);
+ Register tmp = scratch1();
+ __ JumpIfSmi(this->name(), &miss);
+ __ Ldr(tmp, FieldMemOperand(this->name(), HeapObject::kMapOffset));
+ __ Ldrb(tmp, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
+ __ JumpIfNotUniqueNameInstanceType(tmp, &miss);
} else {
__ CompareAndBranch(this->name(), Operand(name), ne, &miss);
}
=======================================
--- /trunk/src/ic/ia32/ic-compiler-ia32.cc Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/ic/ia32/ic-compiler-ia32.cc Tue Sep 23 10:40:53 2014 UTC
@@ -48,7 +48,11 @@
// In case we are compiling an IC for dictionary loads and stores, just
// check whether the name is unique.
if (name.is_identical_to(isolate()->factory()->normal_ic_symbol())) {
- __ JumpIfNotUniqueName(this->name(), &miss);
+ Register tmp = scratch1();
+ __ JumpIfSmi(this->name(), &miss);
+ __ mov(tmp, FieldOperand(this->name(), HeapObject::kMapOffset));
+ __ movzx_b(tmp, FieldOperand(tmp, Map::kInstanceTypeOffset));
+ __ JumpIfNotUniqueNameInstanceType(tmp, &miss);
} else {
__ cmp(this->name(), Immediate(name));
__ j(not_equal, &miss);
=======================================
--- /trunk/src/ic/ic.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/ic/ic.cc Tue Sep 23 10:40:53 2014 UTC
@@ -1346,13 +1346,42 @@
Handle<Code> StoreIC::megamorphic_stub() {
- return PropertyICCompiler::ComputeStore(isolate(), MEGAMORPHIC,
- extra_ic_state());
+ if (kind() == Code::STORE_IC) {
+ return PropertyICCompiler::ComputeStore(isolate(), MEGAMORPHIC,
+ extra_ic_state());
+ } else {
+ DCHECK(kind() == Code::KEYED_STORE_IC);
+ if (strict_mode() == STRICT) {
+ return isolate()->builtins()->KeyedStoreIC_Generic_Strict();
+ } else {
+ return isolate()->builtins()->KeyedStoreIC_Generic();
+ }
+ }
}
Handle<Code> StoreIC::generic_stub() const {
- return PropertyICCompiler::ComputeStore(isolate(), GENERIC,
extra_ic_state());
+ if (kind() == Code::STORE_IC) {
+ return PropertyICCompiler::ComputeStore(isolate(), GENERIC,
+ extra_ic_state());
+ } else {
+ DCHECK(kind() == Code::KEYED_STORE_IC);
+ if (strict_mode() == STRICT) {
+ return isolate()->builtins()->KeyedStoreIC_Generic_Strict();
+ } else {
+ return isolate()->builtins()->KeyedStoreIC_Generic();
+ }
+ }
+}
+
+
+Handle<Code> StoreIC::slow_stub() const {
+ if (kind() == Code::STORE_IC) {
+ return isolate()->builtins()->StoreIC_Slow();
+ } else {
+ DCHECK(kind() == Code::KEYED_STORE_IC);
+ return isolate()->builtins()->KeyedStoreIC_Slow();
+ }
}
=======================================
--- /trunk/src/ic/ic.h Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/ic/ic.h Tue Sep 23 10:40:53 2014 UTC
@@ -371,7 +371,7 @@
}
}
- virtual Handle<Code> megamorphic_stub();
+ virtual Handle<Code> megamorphic_stub() OVERRIDE;
// Update the inline cache and the global stub cache based on the
// lookup result.
@@ -489,14 +489,12 @@
JSReceiver::StoreFromKeyed store_mode);
protected:
- virtual Handle<Code> megamorphic_stub();
+ virtual Handle<Code> megamorphic_stub() OVERRIDE;
// Stub accessors.
- virtual Handle<Code> generic_stub() const;
+ Handle<Code> generic_stub() const;
- virtual Handle<Code> slow_stub() const {
- return isolate()->builtins()->StoreIC_Slow();
- }
+ Handle<Code> slow_stub() const;
virtual Handle<Code> pre_monomorphic_stub() const {
return pre_monomorphic_stub(isolate(), strict_mode());
@@ -577,16 +575,6 @@
return isolate->builtins()->KeyedStoreIC_PreMonomorphic();
}
}
- virtual Handle<Code> slow_stub() const {
- return isolate()->builtins()->KeyedStoreIC_Slow();
- }
- virtual Handle<Code> megamorphic_stub() {
- if (strict_mode() == STRICT) {
- return isolate()->builtins()->KeyedStoreIC_Generic_Strict();
- } else {
- return isolate()->builtins()->KeyedStoreIC_Generic();
- }
- }
Handle<Code> StoreElementStub(Handle<JSObject> receiver,
KeyedAccessStoreMode store_mode);
@@ -595,14 +583,6 @@
inline void set_target(Code* code);
// Stub accessors.
- virtual Handle<Code> generic_stub() const {
- if (strict_mode() == STRICT) {
- return isolate()->builtins()->KeyedStoreIC_Generic_Strict();
- } else {
- return isolate()->builtins()->KeyedStoreIC_Generic();
- }
- }
-
Handle<Code> sloppy_arguments_stub() {
return isolate()->builtins()->KeyedStoreIC_SloppyArguments();
}
=======================================
--- /trunk/src/ic/mips/ic-compiler-mips.cc Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/ic/mips/ic-compiler-mips.cc Tue Sep 23 10:40:53 2014 UTC
@@ -27,7 +27,11 @@
// In case we are compiling an IC for dictionary loads and stores, just
// check whether the name is unique.
if (name.is_identical_to(isolate()->factory()->normal_ic_symbol())) {
- __ JumpIfNotUniqueName(this->name(), &miss);
+ Register tmp = scratch1();
+ __ JumpIfSmi(this->name(), &miss);
+ __ lw(tmp, FieldMemOperand(this->name(), HeapObject::kMapOffset));
+ __ lbu(tmp, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
+ __ JumpIfNotUniqueNameInstanceType(tmp, &miss);
} else {
__ Branch(&miss, ne, this->name(), Operand(name));
}
=======================================
--- /trunk/src/ic/mips64/ic-compiler-mips64.cc Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/ic/mips64/ic-compiler-mips64.cc Tue Sep 23 10:40:53 2014 UTC
@@ -27,7 +27,11 @@
// In case we are compiling an IC for dictionary loads and stores, just
// check whether the name is unique.
if (name.is_identical_to(isolate()->factory()->normal_ic_symbol())) {
- __ JumpIfNotUniqueName(this->name(), &miss);
+ Register tmp = scratch1();
+ __ JumpIfSmi(this->name(), &miss);
+ __ ld(tmp, FieldMemOperand(this->name(), HeapObject::kMapOffset));
+ __ lbu(tmp, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
+ __ JumpIfNotUniqueNameInstanceType(tmp, &miss);
} else {
__ Branch(&miss, ne, this->name(), Operand(name));
}
=======================================
--- /trunk/src/ic/x64/ic-compiler-x64.cc Wed Sep 17 00:05:08 2014 UTC
+++ /trunk/src/ic/x64/ic-compiler-x64.cc Tue Sep 23 10:40:53 2014 UTC
@@ -82,7 +82,11 @@
// In case we are compiling an IC for dictionary loads and stores, just
// check whether the name is unique.
if (name.is_identical_to(isolate()->factory()->normal_ic_symbol())) {
- __ JumpIfNotUniqueName(this->name(), &miss);
+ Register tmp = scratch1();
+ __ JumpIfSmi(this->name(), &miss);
+ __ movp(tmp, FieldOperand(this->name(), HeapObject::kMapOffset));
+ __ movzxbp(tmp, FieldOperand(tmp, Map::kInstanceTypeOffset));
+ __ JumpIfNotUniqueNameInstanceType(tmp, &miss);
} else {
__ Cmp(this->name(), name);
__ j(not_equal, &miss);
=======================================
--- /trunk/src/ic/x87/ic-compiler-x87.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/ic/x87/ic-compiler-x87.cc Tue Sep 23 10:40:53 2014 UTC
@@ -48,7 +48,11 @@
// In case we are compiling an IC for dictionary loads and stores, just
// check whether the name is unique.
if (name.is_identical_to(isolate()->factory()->normal_ic_symbol())) {
- __ JumpIfNotUniqueName(this->name(), &miss);
+ Register tmp = scratch1();
+ __ JumpIfSmi(this->name(), &miss);
+ __ mov(tmp, FieldOperand(this->name(), HeapObject::kMapOffset));
+ __ movzx_b(tmp, FieldOperand(tmp, Map::kInstanceTypeOffset));
+ __ JumpIfNotUniqueNameInstanceType(tmp, &miss);
} else {
__ cmp(this->name(), Immediate(name));
__ j(not_equal, &miss);
=======================================
--- /trunk/src/mips/code-stubs-mips.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/mips/code-stubs-mips.cc Tue Sep 23 10:40:53 2014 UTC
@@ -3644,8 +3644,8 @@
__ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
__ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(tmp1, &miss);
- __ JumpIfNotUniqueName(tmp2, &miss);
+ __ JumpIfNotUniqueNameInstanceType(tmp1, &miss);
+ __ JumpIfNotUniqueNameInstanceType(tmp2, &miss);
// Use a0 as result
__ mov(v0, a0);
@@ -3899,7 +3899,7 @@
__ lw(entity_name, FieldMemOperand(entity_name,
HeapObject::kMapOffset));
__ lbu(entity_name,
FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(entity_name, miss);
+ __ JumpIfNotUniqueNameInstanceType(entity_name, miss);
__ bind(&good);
// Restore the properties.
@@ -4076,7 +4076,7 @@
__ lw(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
__ lbu(entry_key,
FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary);
+ __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary);
}
}
=======================================
--- /trunk/src/mips/macro-assembler-mips.cc Thu Sep 11 00:05:22 2014 UTC
+++ /trunk/src/mips/macro-assembler-mips.cc Tue Sep 23 10:40:53 2014 UTC
@@ -3582,8 +3582,8 @@
}
-void MacroAssembler::JumpIfNotUniqueName(Register reg,
-                                         Label* not_unique_name) {
+void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
+                                                     Label* not_unique_name) {
STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
Label succeed;
And(at, reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
=======================================
--- /trunk/src/mips/macro-assembler-mips.h Thu Sep 11 00:05:22 2014 UTC
+++ /trunk/src/mips/macro-assembler-mips.h Tue Sep 23 10:40:53 2014 UTC
@@ -1483,7 +1483,7 @@
void JumpIfInstanceTypeIsNotSequentialOneByte(Register type, Register
scratch,
Label* failure);
-  void JumpIfNotUniqueName(Register reg, Label* not_unique_name);
+  void JumpIfNotUniqueNameInstanceType(Register reg, Label* not_unique_name);
void EmitSeqStringSetCharCheck(Register string,
Register index,
=======================================
--- /trunk/src/mips64/code-stubs-mips64.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/mips64/code-stubs-mips64.cc Tue Sep 23 10:40:53 2014 UTC
@@ -3681,8 +3681,8 @@
__ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
__ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(tmp1, &miss);
- __ JumpIfNotUniqueName(tmp2, &miss);
+ __ JumpIfNotUniqueNameInstanceType(tmp1, &miss);
+ __ JumpIfNotUniqueNameInstanceType(tmp2, &miss);
// Use a0 as result
__ mov(v0, a0);
@@ -3937,7 +3937,7 @@
__ ld(entity_name, FieldMemOperand(entity_name,
HeapObject::kMapOffset));
__ lbu(entity_name,
FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(entity_name, miss);
+ __ JumpIfNotUniqueNameInstanceType(entity_name, miss);
__ bind(&good);
// Restore the properties.
@@ -4114,7 +4114,7 @@
__ ld(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
__ lbu(entry_key,
FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary);
+ __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary);
}
}
=======================================
--- /trunk/src/mips64/macro-assembler-mips64.cc Thu Sep 11 00:05:22 2014 UTC
+++ /trunk/src/mips64/macro-assembler-mips64.cc Tue Sep 23 10:40:53 2014 UTC
@@ -3492,8 +3492,8 @@
}
-void MacroAssembler::JumpIfNotUniqueName(Register reg,
-                                         Label* not_unique_name) {
+void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
+                                                     Label* not_unique_name) {
STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
Label succeed;
And(at, reg, Operand(kIsNotStringMask | kIsNotInternalizedMask));
=======================================
--- /trunk/src/mips64/macro-assembler-mips64.h Thu Sep 11 00:05:22 2014 UTC
+++ /trunk/src/mips64/macro-assembler-mips64.h Tue Sep 23 10:40:53 2014 UTC
@@ -1554,7 +1554,7 @@
void JumpIfInstanceTypeIsNotSequentialOneByte(Register type, Register
scratch,
Label* failure);
-  void JumpIfNotUniqueName(Register reg, Label* not_unique_name);
+  void JumpIfNotUniqueNameInstanceType(Register reg, Label* not_unique_name);
void EmitSeqStringSetCharCheck(Register string,
Register index,
=======================================
--- /trunk/src/version.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/version.cc Tue Sep 23 10:40:53 2014 UTC
@@ -34,7 +34,7 @@
// system so their names cannot be changed without changing the scripts.
#define MAJOR_VERSION 3
#define MINOR_VERSION 29
-#define BUILD_NUMBER 82
+#define BUILD_NUMBER 83
#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
=======================================
--- /trunk/src/x64/code-stubs-x64.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/x64/code-stubs-x64.cc Tue Sep 23 10:40:53 2014 UTC
@@ -3454,8 +3454,8 @@
__ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
__ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
- __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);
+ __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
+ __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
// Unique names are compared by identity.
Label done;
@@ -3674,8 +3674,8 @@
// Check if the entry name is not a unique name.
__ movp(entity_name, FieldOperand(entity_name,
HeapObject::kMapOffset));
-    __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset),
-                           miss);
+    __ JumpIfNotUniqueNameInstanceType(
+        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
__ bind(&good);
}
@@ -3804,8 +3804,9 @@
// Check if the entry name is not a unique name.
__ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
-      __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
-                             &maybe_in_dictionary);
+      __ JumpIfNotUniqueNameInstanceType(
+          FieldOperand(scratch, Map::kInstanceTypeOffset),
+          &maybe_in_dictionary);
}
}
=======================================
--- /trunk/src/x64/macro-assembler-x64.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/x64/macro-assembler-x64.cc Tue Sep 23 10:40:53 2014 UTC
@@ -2701,16 +2701,16 @@
}
-void MacroAssembler::JumpIfNotUniqueName(Operand operand,
-                                         Label* not_unique_name,
-                                         Label::Distance distance) {
+void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
+                                                     Label* not_unique_name,
+                                                     Label::Distance distance) {
   JumpIfNotUniqueNameHelper<Operand>(this, operand, not_unique_name, distance);
}
-void MacroAssembler::JumpIfNotUniqueName(Register reg,
-                                         Label* not_unique_name,
-                                         Label::Distance distance) {
+void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
+                                                     Label* not_unique_name,
+                                                     Label::Distance distance) {
   JumpIfNotUniqueNameHelper<Register>(this, reg, not_unique_name, distance);
 }
=======================================
--- /trunk/src/x64/macro-assembler-x64.h Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/x64/macro-assembler-x64.h Tue Sep 23 10:40:53 2014 UTC
@@ -797,10 +797,10 @@
uint32_t encoding_mask);
// Checks if the given register or operand is a unique name
-  void JumpIfNotUniqueName(Register reg, Label* not_unique_name,
-                           Label::Distance distance = Label::kFar);
-  void JumpIfNotUniqueName(Operand operand, Label* not_unique_name,
-                           Label::Distance distance = Label::kFar);
+  void JumpIfNotUniqueNameInstanceType(Register reg, Label* not_unique_name,
+                                       Label::Distance distance = Label::kFar);
+  void JumpIfNotUniqueNameInstanceType(Operand operand, Label* not_unique_name,
+                                       Label::Distance distance = Label::kFar);
//
---------------------------------------------------------------------------
// Macro instructions.
=======================================
--- /trunk/src/x87/code-stubs-x87.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/x87/code-stubs-x87.cc Tue Sep 23 10:40:53 2014 UTC
@@ -3167,8 +3167,8 @@
__ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
__ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
- __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
- __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);
+ __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
+ __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
// Unique names are compared by identity.
Label done;
@@ -3393,8 +3393,8 @@
// Check if the entry name is not a unique name.
__ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
-    __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset),
-                           miss);
+    __ JumpIfNotUniqueNameInstanceType(
+        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
__ bind(&good);
}
@@ -3528,8 +3528,9 @@
// Check if the entry name is not a unique name.
__ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
-      __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
-                             &maybe_in_dictionary);
+      __ JumpIfNotUniqueNameInstanceType(
+          FieldOperand(scratch, Map::kInstanceTypeOffset),
+          &maybe_in_dictionary);
}
}
=======================================
--- /trunk/src/x87/macro-assembler-x87.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/x87/macro-assembler-x87.cc Tue Sep 23 10:40:53 2014 UTC
@@ -2869,9 +2869,9 @@
}
-void MacroAssembler::JumpIfNotUniqueName(Operand operand,
-                                         Label* not_unique_name,
-                                         Label::Distance distance) {
+void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
+                                                     Label* not_unique_name,
+                                                     Label::Distance distance) {
STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
Label succeed;
test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
=======================================
--- /trunk/src/x87/macro-assembler-x87.h Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/src/x87/macro-assembler-x87.h Tue Sep 23 10:40:53 2014 UTC
@@ -882,13 +882,13 @@
Label* on_not_flat_one_byte_strings);
// Checks if the given register or operand is a unique name
-  void JumpIfNotUniqueName(Register reg, Label* not_unique_name,
-                           Label::Distance distance = Label::kFar) {
-    JumpIfNotUniqueName(Operand(reg), not_unique_name, distance);
+  void JumpIfNotUniqueNameInstanceType(Register reg, Label* not_unique_name,
+                                       Label::Distance distance = Label::kFar) {
+    JumpIfNotUniqueNameInstanceType(Operand(reg), not_unique_name, distance);
   }
-  void JumpIfNotUniqueName(Operand operand, Label* not_unique_name,
-                           Label::Distance distance = Label::kFar);
+  void JumpIfNotUniqueNameInstanceType(Operand operand, Label* not_unique_name,
+                                       Label::Distance distance = Label::kFar);
void EmitSeqStringSetCharCheck(Register string,
Register index,
=======================================
--- /trunk/test/cctest/compiler/test-js-typed-lowering.cc Tue Sep 23 08:38:19 2014 UTC
+++ /trunk/test/cctest/compiler/test-js-typed-lowering.cc Tue Sep 23 10:40:53 2014 UTC
@@ -1383,6 +1383,48 @@
}
}
}
+
+
+TEST(BuiltinMathMax) {
+ JSTypedLoweringTester R;
+
+ Node* fun = R.HeapConstant(handle(R.isolate->context()->math_max_fun()));
+  Node* call = R.graph.NewNode(R.javascript.Call(2, NO_CALL_FUNCTION_FLAGS),
+                               fun, R.UndefinedConstant());
+ Node* r = R.reduce(call);
+ R.CheckNumberConstant(-V8_INFINITY, r);
+
+ for (size_t i = 0; i < arraysize(kNumberTypes); i++) {
+ Type* t0 = kNumberTypes[i];
+ Node* p0 = R.Parameter(t0, 0);
+    Node* call = R.graph.NewNode(R.javascript.Call(3, NO_CALL_FUNCTION_FLAGS),
+                                 fun, R.UndefinedConstant(), p0);
+ Node* r = R.reduce(call);
+ CHECK_EQ(IrOpcode::kParameter, r->opcode());
+ CHECK_EQ(p0, r);
+ }
+
+ for (size_t i = 0; i < arraysize(kNumberTypes); i++) {
+ for (size_t j = 0; j < arraysize(kNumberTypes); j++) {
+ Type* t0 = kNumberTypes[i];
+ Node* p0 = R.Parameter(t0, 0);
+ Type* t1 = kNumberTypes[j];
+ Node* p1 = R.Parameter(t1, 1);
+      Node* call =
+          R.graph.NewNode(R.javascript.Call(4, NO_CALL_FUNCTION_FLAGS), fun,
+                          R.UndefinedConstant(), p0, p1);
+ Node* r = R.reduce(call);
+
+ if (t0->Is(Type::Integral32()) && t1->Is(Type::Integral32())) {
+ CHECK_EQ(IrOpcode::kPhi, r->opcode());
+ CHECK(p0 == r->InputAt(0) || p1 == r->InputAt(0));
+ CHECK(p1 == r->InputAt(1) || p0 == r->InputAt(1));
+ } else {
+ CHECK_EQ(IrOpcode::kJSCallFunction, r->opcode());
+ CHECK_EQ(call, r);
+ }
+ }
+ }
+}
TEST(BuiltinMathImul) {
=======================================
--- /trunk/test/mjsunit/regress/string-set-char-deopt.js Mon Sep 15 00:05:18 2014 UTC
+++ /trunk/test/mjsunit/regress/string-set-char-deopt.js Tue Sep 23 10:40:53 2014 UTC
@@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-// Flags: --allow-natives-syntax
+// Flags: --allow-natives-syntax --turbo-deoptimization
(function OneByteSeqStringSetCharDeoptOsr() {
function deopt() {
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/d/optout.