Revision: 11107
Author: [email protected]
Date: Wed Mar 21 07:29:14 2012
Log: Enable snapshots on MIPS. This is based on
http://codereview.chromium.org/9372063 by Daniel Kalmar.
Review URL: https://chromiumcodereview.appspot.com/9722020
http://code.google.com/p/v8/source/detail?r=11107
Modified:
/branches/bleeding_edge/SConstruct
/branches/bleeding_edge/src/arm/assembler-arm-inl.h
/branches/bleeding_edge/src/arm/assembler-arm.cc
/branches/bleeding_edge/src/arm/assembler-arm.h
/branches/bleeding_edge/src/ia32/assembler-ia32-inl.h
/branches/bleeding_edge/src/ia32/assembler-ia32.h
/branches/bleeding_edge/src/mips/assembler-mips-inl.h
/branches/bleeding_edge/src/mips/assembler-mips.cc
/branches/bleeding_edge/src/mips/assembler-mips.h
/branches/bleeding_edge/src/mips/builtins-mips.cc
/branches/bleeding_edge/src/mips/code-stubs-mips.cc
/branches/bleeding_edge/src/mips/full-codegen-mips.cc
/branches/bleeding_edge/src/mips/ic-mips.cc
/branches/bleeding_edge/src/mips/lithium-codegen-mips.cc
/branches/bleeding_edge/src/mips/macro-assembler-mips.cc
/branches/bleeding_edge/src/mips/macro-assembler-mips.h
/branches/bleeding_edge/src/mips/regexp-macro-assembler-mips.cc
/branches/bleeding_edge/src/serialize.cc
/branches/bleeding_edge/src/serialize.h
/branches/bleeding_edge/src/x64/assembler-x64-inl.h
/branches/bleeding_edge/src/x64/assembler-x64.cc
/branches/bleeding_edge/src/x64/assembler-x64.h
/branches/bleeding_edge/test/cctest/cctest.status
=======================================
--- /branches/bleeding_edge/SConstruct Tue Mar 13 09:18:30 2012
+++ /branches/bleeding_edge/SConstruct Wed Mar 21 07:29:14 2012
@@ -218,9 +218,12 @@
'LINKFLAGS': ['-m32'],
'mipsabi:softfloat': {
'CPPDEFINES': ['__mips_soft_float=1'],
+ 'fpu:on': {
+ 'CPPDEFINES' : ['CAN_USE_FPU_INSTRUCTIONS']
+ }
},
'mipsabi:hardfloat': {
- 'CPPDEFINES': ['__mips_hard_float=1'],
+      'CPPDEFINES': ['__mips_hard_float=1', 'CAN_USE_FPU_INSTRUCTIONS'],
}
},
'arch:x64': {
@@ -575,7 +578,10 @@
},
'mipsabi:hardfloat': {
'CCFLAGS': ['-mhard-float'],
- 'LINKFLAGS': ['-mhard-float']
+ 'LINKFLAGS': ['-mhard-float'],
+ 'fpu:on': {
+ 'CPPDEFINES' : ['CAN_USE_FPU_INSTRUCTIONS']
+ }
}
}
},
@@ -1146,6 +1152,11 @@
'default': 'on',
'help': 'use vfp3 instructions when building the snapshot [Arm only]'
},
+ 'fpu': {
+ 'values': ['on', 'off'],
+ 'default': 'on',
+ 'help': 'use fpu instructions when building the snapshot [MIPS only]'
+ },
}
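The net effect of the SConstruct changes is that building with fpu=on (the
default) defines CAN_USE_FPU_INSTRUCTIONS, which the MIPS port can consult when
deciding whether snapshot code may assume an FPU. A minimal, hypothetical
sketch of how such a build-time define is typically folded into a CPU-feature
mask (the real probing code in assembler-mips.cc differs; the names below are
illustrative only):

    #include <stdint.h>

    enum CpuFeature { FPU = 1 };  // illustrative feature id, not V8's

    static uint64_t SupportedFeatures() {
      uint64_t supported = 0;
    #ifdef CAN_USE_FPU_INSTRUCTIONS
      // The build declared that the target has an FPU, so code generated for
      // the snapshot may rely on FPU instructions without a runtime probe.
      supported |= 1u << FPU;
    #endif
      return supported;
    }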
=======================================
--- /branches/bleeding_edge/src/arm/assembler-arm-inl.h Wed Jan 25 08:31:25 2012
+++ /branches/bleeding_edge/src/arm/assembler-arm-inl.h Wed Mar 21 07:29:14 2012
@@ -80,7 +80,7 @@
int RelocInfo::target_address_size() {
- return Assembler::kExternalTargetSize;
+ return kPointerSize;
}
@@ -364,8 +364,14 @@
}
-void Assembler::set_target_at(Address constant_pool_entry,
- Address target) {
+void Assembler::deserialization_set_special_target_at(
+ Address constant_pool_entry, Address target) {
+ Memory::Address_at(constant_pool_entry) = target;
+}
+
+
+void Assembler::set_external_target_at(Address constant_pool_entry,
+ Address target) {
Memory::Address_at(constant_pool_entry) = target;
}
=======================================
--- /branches/bleeding_edge/src/arm/assembler-arm.cc Mon Mar 12 06:56:56 2012
+++ /branches/bleeding_edge/src/arm/assembler-arm.cc Wed Mar 21 07:29:14 2012
@@ -137,7 +137,6 @@
// generate those yet.
return false;
}
-
void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
=======================================
--- /branches/bleeding_edge/src/arm/assembler-arm.h Mon Mar 12 06:56:56 2012
+++ /branches/bleeding_edge/src/arm/assembler-arm.h Wed Mar 21 07:29:14 2012
@@ -685,20 +685,18 @@
// This sets the branch destination (which is in the constant pool on ARM).
// This is for calls and branches within generated code.
- inline static void set_target_at(Address constant_pool_entry, Address target);
+ inline static void deserialization_set_special_target_at(
+ Address constant_pool_entry, Address target);
// This sets the branch destination (which is in the constant pool on ARM).
// This is for calls and branches to runtime code.
inline static void set_external_target_at(Address constant_pool_entry,
- Address target) {
- set_target_at(constant_pool_entry, target);
- }
+ Address target);
// Here we are patching the address in the constant pool, not the actual call
// instruction. The address in the constant pool is the same size as a
// pointer.
- static const int kCallTargetSize = kPointerSize;
- static const int kExternalTargetSize = kPointerSize;
+ static const int kSpecialTargetSize = kPointerSize;
// Size of an instruction.
static const int kInstrSize = sizeof(Instr);
=======================================
--- /branches/bleeding_edge/src/ia32/assembler-ia32-inl.h Wed Jan 25 08:31:25 2012
+++ /branches/bleeding_edge/src/ia32/assembler-ia32-inl.h Wed Mar 21 07:29:14 2012
@@ -88,7 +88,7 @@
int RelocInfo::target_address_size() {
- return Assembler::kExternalTargetSize;
+ return Assembler::kSpecialTargetSize;
}
=======================================
--- /branches/bleeding_edge/src/ia32/assembler-ia32.h Mon Mar 12 06:56:56 2012
+++ /branches/bleeding_edge/src/ia32/assembler-ia32.h Wed Mar 21 07:29:14 2012
@@ -598,8 +598,8 @@
// This sets the branch destination (which is in the instruction on x86).
// This is for calls and branches within generated code.
- inline static void set_target_at(Address instruction_payload,
- Address target) {
+ inline static void deserialization_set_special_target_at(
+ Address instruction_payload, Address target) {
set_target_address_at(instruction_payload, target);
}
@@ -610,8 +610,7 @@
set_target_address_at(instruction_payload, target);
}
- static const int kCallTargetSize = kPointerSize;
- static const int kExternalTargetSize = kPointerSize;
+ static const int kSpecialTargetSize = kPointerSize;
// Distance between the address of the code target in the call instruction
// and the return address
=======================================
--- /branches/bleeding_edge/src/mips/assembler-mips-inl.h Wed Jan 25 08:31:25 2012
+++ /branches/bleeding_edge/src/mips/assembler-mips-inl.h Wed Mar 21 07:29:14 2012
@@ -117,13 +117,31 @@
Address RelocInfo::target_address_address() {
- ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
- return reinterpret_cast<Address>(pc_);
+ ASSERT(IsCodeTarget(rmode_) ||
+ rmode_ == RUNTIME_ENTRY ||
+ rmode_ == EMBEDDED_OBJECT ||
+ rmode_ == EXTERNAL_REFERENCE);
+ // Read the address of the word containing the target_address in an
+ // instruction stream.
+ // The only architecture-independent user of this function is the serializer.
+ // The serializer uses it to find out how many raw bytes of instruction to
+ // output before the next target.
+ // For an instruction like LUI/ORI where the target bits are mixed into the
+ // instruction bits, the size of the target will be zero, indicating that the
+ // serializer should not step forward in memory after a target is resolved
+ // and written. In this case the target_address_address function should
+ // return the end of the instructions to be patched, allowing the
+ // deserializer to deserialize the instructions as raw bytes and put them in
+ // place, ready to be patched with the target. After jump optimization,
+ // that is the address of the instruction that follows J/JAL/JR/JALR
+ // instruction.
+ return reinterpret_cast<Address>(
+     pc_ + Assembler::kInstructionsFor32BitConstant * Assembler::kInstrSize);
}
int RelocInfo::target_address_size() {
- return Assembler::kExternalTargetSize;
+ return Assembler::kSpecialTargetSize;
}
@@ -281,7 +299,7 @@
} else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
visitor->VisitGlobalPropertyCell(this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
- visitor->VisitExternalReference(target_reference_address());
+ visitor->VisitExternalReference(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
// TODO(isolates): Get a cached isolate below.
} else if (((RelocInfo::IsJSReturn(mode) &&
@@ -307,7 +325,7 @@
} else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
StaticVisitor::VisitGlobalPropertyCell(heap, this);
} else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
- StaticVisitor::VisitExternalReference(target_reference_address());
+ StaticVisitor::VisitExternalReference(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
} else if (heap->isolate()->debug()->has_break_points() &&
((RelocInfo::IsJSReturn(mode) &&
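For readers unfamiliar with the MIPS constant-loading idiom referenced above: a
32-bit value is materialized by a lui/ori pair, with 16 bits of the value in
each instruction's immediate field, so there is no single word in the
instruction stream that holds the whole target. A standalone sketch of that
split (illustrative only; it follows the MIPS32 I-type encoding, not V8's
patching code):

    #include <stdint.h>

    // Patch a 32-bit target into the 16-bit immediate fields of an existing
    // lui/ori pair. Opcode and register bits (the upper halfword of each
    // instruction word) are left untouched.
    void PatchLuiOriTarget(uint32_t target, uint32_t* lui, uint32_t* ori) {
      *lui = (*lui & 0xFFFF0000u) | (target >> 16);      // high halfword
      *ori = (*ori & 0xFFFF0000u) | (target & 0xFFFFu);  // low halfword
    }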
=======================================
--- /branches/bleeding_edge/src/mips/assembler-mips.cc Tue Mar 13 09:18:30 2012
+++ /branches/bleeding_edge/src/mips/assembler-mips.cc Wed Mar 21 07:29:14 2012
@@ -849,7 +849,6 @@
bool Assembler::MustUseReg(RelocInfo::Mode rmode) {
return rmode != RelocInfo::NONE;
}
-
void Assembler::GenInstrRegister(Opcode opcode,
Register rs,
=======================================
--- /branches/bleeding_edge/src/mips/assembler-mips.h Thu Jan 19 05:41:11 2012
+++ /branches/bleeding_edge/src/mips/assembler-mips.h Wed Mar 21 07:29:14 2012
@@ -553,10 +553,13 @@
static void JumpLabelToJumpRegister(Address pc);
// This sets the branch destination (which gets loaded at the call address).
- // This is for calls and branches within generated code.
- inline static void set_target_at(Address instruction_payload,
- Address target) {
- set_target_address_at(instruction_payload, target);
+ // This is for calls and branches within generated code. The serializer
+ // has already deserialized the lui/ori instructions etc.
+ inline static void deserialization_set_special_target_at(
+ Address instruction_payload, Address target) {
+ set_target_address_at(
+ instruction_payload - kInstructionsFor32BitConstant * kInstrSize,
+ target);
}
// This sets the branch destination.
@@ -578,8 +581,7 @@
// are split across two consecutive instructions and don't exist separately
// in the code, so the serializer should not step forwards in memory after
// a target is resolved and written.
- static const int kCallTargetSize = 0 * kInstrSize;
- static const int kExternalTargetSize = 0 * kInstrSize;
+ static const int kSpecialTargetSize = 0;
// Number of consecutive instructions used to store 32bit constant.
// Before jump-optimizations, this constant was used in
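Since kSpecialTargetSize is 0 on MIPS, the deserializer's cursor is already
positioned just past the lui/ori pair when the target value becomes known,
which is why deserialization_set_special_target_at steps back before patching.
A rough sketch of that address arithmetic, assuming kInstructionsFor32BitConstant
is 2 and kInstrSize is 4 as in this header:

    #include <stdint.h>

    const int kInstrSize = 4;                     // one MIPS instruction
    const int kInstructionsFor32BitConstant = 2;  // the lui/ori pair

    // instruction_payload points just past the lui/ori pair; back up to the
    // first instruction of the pair so it can be patched with the target.
    uint8_t* StartOfConstantLoad(uint8_t* instruction_payload) {
      return instruction_payload - kInstructionsFor32BitConstant * kInstrSize;
    }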
=======================================
--- /branches/bleeding_edge/src/mips/builtins-mips.cc Wed Mar 21 01:41:16 2012
+++ /branches/bleeding_edge/src/mips/builtins-mips.cc Wed Mar 21 07:29:14 2012
@@ -1097,8 +1097,6 @@
// Set up the context from the function argument.
__ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
- __ InitializeRootRegister();
-
// Push the function and the receiver onto the stack.
__ Push(a1, a2);
=======================================
--- /branches/bleeding_edge/src/mips/code-stubs-mips.cc Wed Mar 21 01:41:16 2012
+++ /branches/bleeding_edge/src/mips/code-stubs-mips.cc Wed Mar 21 07:29:14 2012
@@ -3942,13 +3942,16 @@
// Special handling of out of memory exceptions.
Failure* out_of_memory = Failure::OutOfMemoryException();
- __ Branch(USE_DELAY_SLOT, throw_out_of_memory_exception, eq,
- v0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
+ __ Branch(USE_DELAY_SLOT,
+ throw_out_of_memory_exception,
+ eq,
+ v0,
+ Operand(reinterpret_cast<int32_t>(out_of_memory)));
// If we throw the OOM exception, the value of a3 doesn't matter.
// Any instruction can be in the delay slot that's not a jump.
// Retrieve the pending exception and clear the variable.
- __ li(a3, Operand(isolate->factory()->the_hole_value()));
+ __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
__ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
isolate)));
__ lw(v0, MemOperand(t0));
@@ -3956,8 +3959,8 @@
// Special handling of termination exceptions which are uncatchable
// by javascript code.
- __ Branch(throw_termination_exception, eq,
- v0, Operand(isolate->factory()->termination_exception()));
+ __ LoadRoot(t0, Heap::kTerminationExceptionRootIndex);
+ __ Branch(throw_termination_exception, eq, v0, Operand(t0));
// Handle normal exception.
__ jmp(throw_normal_exception);
@@ -4084,6 +4087,7 @@
offset_to_argv += kNumCalleeSavedFPU * kDoubleSize;
}
+ __ InitializeRootRegister();
__ lw(s0, MemOperand(sp, offset_to_argv + kCArgsSlotsSize));
// We build an EntryFrame.
@@ -4156,7 +4160,7 @@
// saved values before returning a failure to C.
// Clear any pending exceptions.
- __ li(t1, Operand(isolate->factory()->the_hole_value()));
+ __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
__ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
isolate)));
__ sw(t1, MemOperand(t0));
@@ -4200,7 +4204,9 @@
// Check if the current stack frame is marked as the outermost JS frame.
Label non_outermost_js_2;
__ pop(t1);
- __ Branch(&non_outermost_js_2, ne, t1,
+ __ Branch(&non_outermost_js_2,
+ ne,
+ t1,
Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
__ li(t1, Operand(ExternalReference(js_entry_sp)));
__ sw(zero_reg, MemOperand(t1));
@@ -4365,8 +4371,10 @@
__ Branch(&slow, ne, scratch, Operand(JS_FUNCTION_TYPE));
// Null is not instance of anything.
- __ Branch(&object_not_null, ne, scratch,
- Operand(masm->isolate()->factory()->null_value()));
+ __ Branch(&object_not_null,
+ ne,
+ scratch,
+ Operand(masm->isolate()->factory()->null_value()));
__ li(v0, Operand(Smi::FromInt(1)));
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
@@ -4471,8 +4479,10 @@
Label runtime;
__ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset));
- __ Branch(&runtime, ne,
- a2, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ Branch(&runtime,
+ ne,
+ a2,
+ Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
// Patch the arguments.length and the parameters pointer in the current frame.
__ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
@@ -4504,7 +4514,9 @@
Label adaptor_frame, try_allocate;
__ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset));
- __ Branch(&adaptor_frame, eq, a2,
+ __ Branch(&adaptor_frame,
+ eq,
+ a2,
Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
// No adaptor, parameter count = argument count.
@@ -5115,14 +5127,11 @@
// Check the result.
Label success;
- __ Branch(&success, eq,
- v0, Operand(NativeRegExpMacroAssembler::SUCCESS));
+ __ Branch(&success, eq, v0, Operand(NativeRegExpMacroAssembler::SUCCESS));
Label failure;
- __ Branch(&failure, eq,
- v0, Operand(NativeRegExpMacroAssembler::FAILURE));
+ __ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE));
// If not exception it can only be retry. Handle that in the runtime system.
- __ Branch(&runtime, ne,
- v0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
+ __ Branch(&runtime, ne, v0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
// Result must now be exception. If there is no pending exception already a
// stack overflow (on the backtrack stack) was detected in RegExp code but
// haven't created the exception yet. Handle that in the runtime system.
@@ -5875,10 +5884,8 @@
__ Branch(&tmp, Ugreater, scratch, Operand(static_cast<int>('9' - '0')));
__ Or(c1, c1, scratch1);
__ bind(&tmp);
- __ Branch(not_found,
- Uless_equal,
- scratch,
- Operand(static_cast<int>('9' - '0')));
+ __ Branch(
+     not_found, Uless_equal, scratch, Operand(static_cast<int>('9' - '0')));
__ bind(&not_array_index);
// Calculate the two character string hash.
@@ -6548,8 +6555,7 @@
__ bind(&longer_than_two);
// Check if resulting string will be flat.
- __ Branch(&string_add_flat_result, lt, t2,
- Operand(ConsString::kMinLength));
+ __ Branch(&string_add_flat_result, lt, t2, Operand(ConsString::kMinLength));
// Handle exceptionally long strings in the runtime system.
STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
ASSERT(IsPowerOf2(String::kMaxLength + 1));
@@ -7076,8 +7082,10 @@
// Push return address (accessible to GC through exit frame pc).
// This spot for ra was reserved in EnterExitFrame.
masm->sw(ra, MemOperand(sp, kCArgsSlotsSize));
- masm->li(ra, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
- RelocInfo::CODE_TARGET), true);
+ masm->li(ra,
+ Operand(reinterpret_cast<intptr_t>(GetCode().location()),
+ RelocInfo::CODE_TARGET),
+ CONSTANT_SIZE);
// Call the function.
masm->Jump(t9);
// Make sure the stored 'ra' points to this position.
=======================================
--- /branches/bleeding_edge/src/mips/full-codegen-mips.cc Thu Mar 15 02:28:49 2012
+++ /branches/bleeding_edge/src/mips/full-codegen-mips.cc Wed Mar 21 07:29:14 2012
@@ -120,7 +120,7 @@
int FullCodeGenerator::self_optimization_header_size() {
- return 11 * Instruction::kInstrSize;
+ return 10 * Instruction::kInstrSize;
}
@@ -2613,7 +2613,7 @@
Label entry, loop;
// The use of t2 to store the valueOf symbol assumes that it is not otherwise
// used in the loop below.
- __ li(t2, Operand(FACTORY->value_of_symbol()));
+ __ LoadRoot(t2, Heap::kvalue_of_symbolRootIndex);
__ jmp(&entry);
__ bind(&loop);
__ lw(a3, MemOperand(t0, 0));
=======================================
--- /branches/bleeding_edge/src/mips/ic-mips.cc Wed Mar 21 01:41:16 2012
+++ /branches/bleeding_edge/src/mips/ic-mips.cc Wed Mar 21 07:29:14 2012
@@ -758,8 +758,6 @@
Register scratch3,
Label* unmapped_case,
Label* slow_case) {
- Heap* heap = masm->isolate()->heap();
-
// Check that the receiver is a JSObject. Because of the map check
// later, we do not need to check for interceptors or whether it
// requires access checks.
@@ -773,10 +771,12 @@
__ Branch(slow_case, ne, scratch1, Operand(zero_reg));
// Load the elements into scratch1 and check its map.
- Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
__ lw(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
- __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);
-
+ __ CheckMap(scratch1,
+ scratch2,
+ Heap::kNonStrictArgumentsElementsMapRootIndex,
+ slow_case,
+ DONT_DO_SMI_CHECK);
// Check if element is in the range of mapped arguments. If not, jump
// to the unmapped lookup with the parameter map in scratch1.
__ lw(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
@@ -820,8 +820,10 @@
const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
Register backing_store = parameter_map;
__ lw(backing_store, FieldMemOperand(parameter_map,
kBackingStoreOffset));
- Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
- __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
+ __ CheckMap(backing_store,
+ scratch,
+ Heap::kFixedArrayMapRootIndex,
+ slow_case,
DONT_DO_SMI_CHECK);
__ lw(scratch, FieldMemOperand(backing_store,
FixedArray::kLengthOffset));
__ Branch(slow_case, Ugreater_equal, key, Operand(scratch));
@@ -1253,8 +1255,9 @@
__ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
__ Branch(&slow, hs, key, Operand(t0));
__ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
- __ Branch(&check_if_double_array, ne, elements_map,
- Operand(masm->isolate()->factory()->fixed_array_map()));
+ __ Branch(
+     &check_if_double_array, ne, elements_map, Heap::kFixedArrayMapRootIndex);
+
// Calculate key + 1 as smi.
STATIC_ASSERT(kSmiTag == 0);
__ Addu(t0, key, Operand(Smi::FromInt(1)));
@@ -1262,8 +1265,7 @@
__ Branch(&fast_object_without_map_check);
__ bind(&check_if_double_array);
- __ Branch(&slow, ne, elements_map,
- Operand(masm->isolate()->factory()->fixed_double_array_map()));
+ __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
// Add 1 to key, and go to common element store code for doubles.
STATIC_ASSERT(kSmiTag == 0);
__ Addu(t0, key, Operand(Smi::FromInt(1)));
@@ -1285,8 +1287,10 @@
Register scratch_value = t0;
Register address = t1;
__ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
- __ Branch(&fast_double_with_map_check, ne, elements_map,
- Operand(masm->isolate()->factory()->fixed_array_map()));
+ __ Branch(&fast_double_with_map_check,
+ ne,
+ elements_map,
+ Heap::kFixedArrayMapRootIndex);
__ bind(&fast_object_without_map_check);
// Smi stores don't require further checks.
Label non_smi_value;
@@ -1323,8 +1327,7 @@
__ bind(&fast_double_with_map_check);
// Check for fast double array case. If this fails, call through to the
// runtime.
- __ Branch(&slow, ne, elements_map,
- Operand(masm->isolate()->factory()->fixed_double_array_map()));
+ __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
__ bind(&fast_double_without_map_check);
__ StoreNumberToDoubleElements(value,
key,
=======================================
--- /branches/bleeding_edge/src/mips/lithium-codegen-mips.cc Wed Mar 21 01:41:16 2012
+++ /branches/bleeding_edge/src/mips/lithium-codegen-mips.cc Wed Mar 21 07:29:14 2012
@@ -2077,7 +2077,7 @@
// We use Factory::the_hole_value() on purpose instead of loading from the
// root array to force relocation to be able to later patch
// with true or false.
- __ li(result, Operand(factory()->the_hole_value()), true);
+ __ li(result, Operand(factory()->the_hole_value()), CONSTANT_SIZE);
__ Branch(&done);
// The inlined call site cache did not match. Check null and string before
@@ -2132,7 +2132,7 @@
__ bind(&before_push_delta);
{
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
- __ li(temp, Operand(delta * kPointerSize), true);
+ __ li(temp, Operand(delta * kPointerSize), CONSTANT_SIZE);
__ StoreToSafepointRegisterSlot(temp, temp);
}
CallCodeGeneric(stub.GetCode(),
=======================================
--- /branches/bleeding_edge/src/mips/macro-assembler-mips.cc Wed Mar 21 01:41:16 2012
+++ /branches/bleeding_edge/src/mips/macro-assembler-mips.cc Wed Mar 21 07:29:14 2012
@@ -767,14 +767,13 @@
}
}
}
-
//------------Pseudo-instructions-------------
-void MacroAssembler::li(Register rd, Operand j, bool gen2instr) {
+void MacroAssembler::li(Register rd, Operand j, LiFlags mode) {
ASSERT(!j.is_reg());
BlockTrampolinePoolScope block_trampoline_pool(this);
- if (!MustUseReg(j.rmode_) && !gen2instr) {
+ if (!MustUseReg(j.rmode_) && mode == OPTIMIZE_SIZE) {
// Normal load of an immediate value which does not need Relocation Info.
if (is_int16(j.imm32_)) {
addiu(rd, zero_reg, j.imm32_);
@@ -786,7 +785,7 @@
lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask);
ori(rd, rd, (j.imm32_ & kImm16Mask));
}
- } else if (MustUseReg(j.rmode_) || gen2instr) {
+ } else {
if (MustUseReg(j.rmode_)) {
RecordRelocInfo(j.rmode_, j.imm32_);
}
@@ -1645,6 +1644,16 @@
}
}
}
+
+
+void MacroAssembler::Branch(Label* L,
+ Condition cond,
+ Register rs,
+ Heap::RootListIndex index,
+ BranchDelaySlot bdslot) {
+ LoadRoot(at, index);
+ Branch(L, cond, rs, Operand(at), bdslot);
+}
void MacroAssembler::BranchShort(int16_t offset, BranchDelaySlot bdslot) {
@@ -2541,7 +2550,7 @@
// Must record previous source positions before the
// li() generates a new code target.
positions_recorder()->WriteRecordedPositions();
- li(t9, Operand(target_int, rmode), true);
+ li(t9, Operand(target_int, rmode), CONSTANT_SIZE);
Call(t9, cond, rs, rt, bd);
ASSERT_EQ(CallSize(target, rmode, cond, rs, rt, bd),
SizeOfCodeGeneratedSince(&start));
@@ -2752,7 +2761,7 @@
unsigned state =
StackHandler::IndexField::encode(handler_index) |
StackHandler::KindField::encode(kind);
- li(t1, Operand(CodeObject()));
+ li(t1, Operand(CodeObject()), CONSTANT_SIZE);
li(t2, Operand(state));
// Push the frame pointer, context, state, and code object.
@@ -3381,7 +3390,7 @@
// Ensure that the object is a heap number
CheckMap(value_reg,
scratch1,
- isolate()->factory()->heap_number_map(),
+ Heap::kHeapNumberMapRootIndex,
fail,
DONT_DO_SMI_CHECK);
@@ -4493,7 +4502,7 @@
void MacroAssembler::EnterFrame(StackFrame::Type type) {
addiu(sp, sp, -5 * kPointerSize);
li(t8, Operand(Smi::FromInt(type)));
- li(t9, Operand(CodeObject()));
+ li(t9, Operand(CodeObject()), CONSTANT_SIZE);
sw(ra, MemOperand(sp, 4 * kPointerSize));
sw(fp, MemOperand(sp, 3 * kPointerSize));
sw(cp, MemOperand(sp, 2 * kPointerSize));
@@ -4537,7 +4546,8 @@
sw(zero_reg, MemOperand(fp, ExitFrameConstants::kSPOffset));
}
- li(t8, Operand(CodeObject())); // Accessed from ExitFrame::code_slot.
+ // Accessed from ExitFrame::code_slot.
+ li(t8, Operand(CodeObject()), CONSTANT_SIZE);
sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset));
// Save the frame pointer and the context in top.
@@ -5263,7 +5273,7 @@
FieldMemOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
Label not_smi;
JumpIfNotSmi(descriptors, &not_smi);
- li(descriptors, Operand(FACTORY->empty_descriptor_array()));
+ LoadRoot(descriptors, Heap::kEmptyDescriptorArrayRootIndex);
bind(&not_smi);
}
=======================================
--- /branches/bleeding_edge/src/mips/macro-assembler-mips.h Wed Mar 21 01:41:16 2012
+++ /branches/bleeding_edge/src/mips/macro-assembler-mips.h Wed Mar 21 07:29:14 2012
@@ -81,6 +81,16 @@
PROTECT
};
+// Flags used for the li macro-assembler function.
+enum LiFlags {
+ // If the constant value can be represented in just 16 bits, then
+ // optimize the li to use a single instruction, rather than lui/ori pair.
+ OPTIMIZE_SIZE = 0,
+ // Always use 2 instructions (lui/ori pair), even if the constant could
+ // be loaded with just one, so that this value is patchable later.
+ CONSTANT_SIZE = 1
+};
+
enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET };
enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };
@@ -183,6 +193,12 @@
Register rs = zero_reg, const Operand& rt = Operand(zero_reg)) {
Ret(cond, rs, rt, bd);
}
+
+ void Branch(Label* L,
+ Condition cond,
+ Register rs,
+ Heap::RootListIndex index,
+ BranchDelaySlot bdslot = PROTECT);
#undef COND_ARGS
@@ -247,7 +263,6 @@
void jmp(Label* L) {
Branch(L);
}
-
// Load an object from the root table.
void LoadRoot(Register destination,
@@ -579,12 +594,13 @@
void mov(Register rd, Register rt) { or_(rd, rt, zero_reg); }
// Load int32 in the rd register.
- void li(Register rd, Operand j, bool gen2instr = false);
- inline void li(Register rd, int32_t j, bool gen2instr = false) {
- li(rd, Operand(j), gen2instr);
- }
- inline void li(Register dst, Handle<Object> value, bool gen2instr = false) {
- li(dst, Operand(value), gen2instr);
+ void li(Register rd, Operand j, LiFlags mode = OPTIMIZE_SIZE);
+ inline void li(Register rd, int32_t j, LiFlags mode = OPTIMIZE_SIZE) {
+ li(rd, Operand(j), mode);
+ }
+ inline void li(Register dst, Handle<Object> value,
+ LiFlags mode = OPTIMIZE_SIZE) {
+ li(dst, Operand(value), mode);
}
// Push multiple registers on the stack.
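The LiFlags argument replaces the old gen2instr bool: OPTIMIZE_SIZE lets li
pick the shortest encoding, while CONSTANT_SIZE forces the full lui/ori pair so
the loaded value keeps a fixed size and stays patchable by the serializer and
deserializer. Usage as it appears in calls elsewhere in this patch:

    // Shortest form: a single addiu/ori/lui may be emitted when the value fits.
    __ li(t2, Operand(state));                        // defaults to OPTIMIZE_SIZE

    // Fixed-size form: always lui/ori, so snapshot machinery can patch it later.
    __ li(t9, Operand(CodeObject()), CONSTANT_SIZE);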
=======================================
--- /branches/bleeding_edge/src/mips/regexp-macro-assembler-mips.cc Tue Mar 13 09:18:30 2012
+++ /branches/bleeding_edge/src/mips/regexp-macro-assembler-mips.cc Wed Mar 21 07:29:14 2012
@@ -386,7 +386,7 @@
// Restore regexp engine registers.
__ MultiPop(regexp_registers_to_retain);
- __ li(code_pointer(), Operand(masm_->CodeObject()));
+ __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
__ lw(end_of_input_address(), MemOperand(frame_pointer(), kInputEnd));
// Check if function returned non-zero for success or zero for failure.
@@ -698,7 +698,7 @@
// Initialize backtrack stack pointer.
__ lw(backtrack_stackpointer(), MemOperand(frame_pointer(),
kStackHighEnd));
// Initialize code pointer register
- __ li(code_pointer(), Operand(masm_->CodeObject()));
+ __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
// Load previous char as initial value of current character register.
Label at_start;
__ lw(a0, MemOperand(frame_pointer(), kAtStart));
@@ -783,7 +783,7 @@
// String might have moved: Reload end of string from frame.
__ lw(end_of_input_address(), MemOperand(frame_pointer(),
kInputEnd));
- __ li(code_pointer(), Operand(masm_->CodeObject()));
+ __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
SafeReturn();
}
@@ -813,7 +813,7 @@
// Otherwise use return value as new stack pointer.
__ mov(backtrack_stackpointer(), v0);
// Restore saved registers and continue.
- __ li(code_pointer(), Operand(masm_->CodeObject()));
+ __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
__ lw(end_of_input_address(), MemOperand(frame_pointer(),
kInputEnd));
SafeReturn();
}
@@ -1010,7 +1010,7 @@
__ PrepareCallCFunction(num_arguments, scratch);
__ mov(a2, frame_pointer());
// Code* of self.
- __ li(a1, Operand(masm_->CodeObject()));
+ __ li(a1, Operand(masm_->CodeObject()), CONSTANT_SIZE);
// a0 becomes return address pointer.
ExternalReference stack_guard_check =
ExternalReference::re_check_stack_guard_state(masm_->isolate());
@@ -1229,7 +1229,7 @@
if (OS::ActivationFrameAlignment() != 0) {
__ lw(sp, MemOperand(sp, 16));
}
- __ li(code_pointer(), Operand(masm_->CodeObject()));
+ __ li(code_pointer(), Operand(masm_->CodeObject()), CONSTANT_SIZE);
}
=======================================
--- /branches/bleeding_edge/src/serialize.cc Fri Mar 9 04:07:29 2012
+++ /branches/bleeding_edge/src/serialize.cc Wed Mar 21 07:29:14 2012
@@ -849,13 +849,12 @@
if (how == kFromCode) {                                                    \
  Address location_of_branch_data = reinterpret_cast<Address>(current);   \
-   Assembler::set_target_at(location_of_branch_data,                     \
-                            reinterpret_cast<Address>(new_object));      \
-   if (within == kFirstInstruction) {                                    \
-     location_of_branch_data += Assembler::kCallTargetSize;              \
-     current = reinterpret_cast<Object**>(location_of_branch_data);      \
-     current_was_incremented = true;                                     \
-   }                                                                     \
+   Assembler::deserialization_set_special_target_at(                     \
+       location_of_branch_data,                                          \
+       reinterpret_cast<Address>(new_object));                           \
+   location_of_branch_data += Assembler::kSpecialTargetSize;             \
+   current = reinterpret_cast<Object**>(location_of_branch_data);        \
+   current_was_incremented = true;                                       \
} else {                                                                   \
  *current = new_object;                                                  \
}                                                                          \
@@ -991,6 +990,21 @@
// Find a recently deserialized object using its offset from the current
// allocation point and write a pointer to it to the current object.
ALL_SPACES(kBackref, kPlain, kStartOfObject)
+#if V8_TARGET_ARCH_MIPS
+ // Deserialize a new object from pointer found in code and write
+ // a pointer to it to the current object. Required only for MIPS, and
+ // omitted on the other architectures because it is fully unrolled and
+ // would cause bloat.
+ ONE_PER_SPACE(kNewObject, kFromCode, kStartOfObject)
+ // Find a recently deserialized code object using its offset from the
+ // current allocation point and write a pointer to it to the current
+ // object. Required only for MIPS.
+ ALL_SPACES(kBackref, kFromCode, kStartOfObject)
+ // Find an already deserialized code object using its offset from
+ // the start and write a pointer to it to the current object.
+ // Required only for MIPS.
+ ALL_SPACES(kFromStart, kFromCode, kStartOfObject)
+#endif
// Find a recently deserialized code object using its offset from the
// current allocation point and write a pointer to its first instruction
// to the current code object or the instruction pointer in a function
@@ -1229,12 +1243,23 @@
}
-int Serializer::RootIndex(HeapObject* heap_object) {
+int Serializer::RootIndex(HeapObject* heap_object, HowToCode from) {
Heap* heap = HEAP;
if (heap->InNewSpace(heap_object)) return kInvalidRootIndex;
for (int i = 0; i < root_index_wave_front_; i++) {
Object* root = heap->roots_array_start()[i];
- if (!root->IsSmi() && root == heap_object) return i;
+ if (!root->IsSmi() && root == heap_object) {
+#if V8_TARGET_ARCH_MIPS
+ if (from == kFromCode) {
+ // In order to avoid code bloat in the deserializer we don't have
+ // support for the encoding that specifies a particular root should
+ // be written into the lui/ori instructions on MIPS. Therefore we
+ // should not generate such serialization data for MIPS.
+ return kInvalidRootIndex;
+ }
+#endif
+ return i;
+ }
}
return kInvalidRootIndex;
}
@@ -1287,7 +1312,7 @@
HeapObject* heap_object = HeapObject::cast(o);
int root_index;
- if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) {
+ if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
PutRoot(root_index, heap_object, how_to_code, where_to_point);
return;
}
@@ -1359,7 +1384,7 @@
}
int root_index;
- if ((root_index = RootIndex(heap_object)) != kInvalidRootIndex) {
+ if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
PutRoot(root_index, heap_object, how_to_code, where_to_point);
return;
}
@@ -1439,7 +1464,7 @@
while (current < end && !(*current)->IsSmi()) {
HeapObject* current_contents = HeapObject::cast(*current);
- int root_index = serializer_->RootIndex(current_contents);
+ int root_index = serializer_->RootIndex(current_contents, kPlain);
// Repeats are not subject to the write barrier so there are only some
// objects that can be used in a repeat encoding. These are the early
// ones in the root array that are never in new space.
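Taken together, the serialize.cc changes let the MIPS deserializer patch
targets that live inside code: after deserialization_set_special_target_at
writes the target, the cursor advances by the architecture's
kSpecialTargetSize, which is a pointer or displacement width on ia32, x64 and
ARM but 0 on MIPS because no raw target word follows the lui/ori instructions.
A condensed, hypothetical sketch of that cursor step (types and names are
placeholders, not the real macro):

    #include <stdint.h>

    struct Object;  // placeholder for v8::internal::Object

    Object** AdvancePastCodeTarget(Object** current, int special_target_size) {
      uint8_t* location_of_branch_data = reinterpret_cast<uint8_t*>(current);
      // ...the target has just been patched into the instruction stream here...
      location_of_branch_data += special_target_size;  // 0 on MIPS
      return reinterpret_cast<Object**>(location_of_branch_data);
    }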
=======================================
--- /branches/bleeding_edge/src/serialize.h Thu Feb 23 04:11:24 2012
+++ /branches/bleeding_edge/src/serialize.h Wed Mar 21 07:29:14 2012
@@ -485,7 +485,7 @@
protected:
static const int kInvalidRootIndex = -1;
- int RootIndex(HeapObject* heap_object);
+ int RootIndex(HeapObject* heap_object, HowToCode from);
virtual bool ShouldBeInThePartialSnapshotCache(HeapObject* o) = 0;
intptr_t root_index_wave_front() { return root_index_wave_front_; }
void set_root_index_wave_front(intptr_t value) {
=======================================
--- /branches/bleeding_edge/src/x64/assembler-x64-inl.h Wed Jan 25 08:31:25 2012
+++ /branches/bleeding_edge/src/x64/assembler-x64-inl.h Wed Mar 21 07:29:14 2012
@@ -235,9 +235,9 @@
int RelocInfo::target_address_size() {
if (IsCodedSpecially()) {
- return Assembler::kCallTargetSize;
+ return Assembler::kSpecialTargetSize;
} else {
- return Assembler::kExternalTargetSize;
+ return kPointerSize;
}
}
=======================================
--- /branches/bleeding_edge/src/x64/assembler-x64.cc Tue Feb 21 01:11:35 2012
+++ /branches/bleeding_edge/src/x64/assembler-x64.cc Wed Mar 21 07:29:14 2012
@@ -3044,8 +3044,6 @@
// by branch instructions.
return (1 << rmode_) & kApplyMask;
}
-
-
} } // namespace v8::internal
=======================================
--- /branches/bleeding_edge/src/x64/assembler-x64.h Mon Mar 12 06:56:56 2012
+++ /branches/bleeding_edge/src/x64/assembler-x64.h Wed Mar 21 07:29:14 2012
@@ -577,8 +577,8 @@
// This sets the branch destination (which is in the instruction on x64).
// This is for calls and branches within generated code.
- inline static void set_target_at(Address instruction_payload,
- Address target) {
+ inline static void deserialization_set_special_target_at(
+ Address instruction_payload, Address target) {
set_target_address_at(instruction_payload, target);
}
@@ -591,8 +591,7 @@
inline Handle<Object> code_target_object_handle_at(Address pc);
// Number of bytes taken up by the branch target in the code.
- static const int kCallTargetSize = 4; // Use 32-bit displacement.
- static const int kExternalTargetSize = 8; // Use 64-bit absolute.
+ static const int kSpecialTargetSize = 4; // Use 32-bit displacement.
// Distance between the address of the code target in the call instruction
// and the return address pushed on the stack.
static const int kCallTargetAddressOffset = 4; // Use 32-bit displacement.
=======================================
--- /branches/bleeding_edge/test/cctest/cctest.status Mon Feb 20 07:34:08 2012
+++ /branches/bleeding_edge/test/cctest/cctest.status Wed Mar 21 07:29:14 2012
@@ -83,10 +83,6 @@
test-debug/DebugBreakLoop: SKIP
-##############################################################################
-[ $arch == mips ]
-test-serialize: SKIP
-
##############################################################################
[ $arch == mips && $crankshaft ]