Revision: 11808
Author: [email protected]
Date: Thu Jun 14 02:20:45 2012
Log: Merged r11784 into 3.10 branch.
ARM: Add literal pool blocking to position-critical code.
Original CL: https://chromiumcodereview.appspot.com/10451037/
[email protected]
Review URL: https://chromiumcodereview.appspot.com/10545174
http://code.google.com/p/v8/source/detail?r=11808
Modified:
/branches/3.10/src/arm/code-stubs-arm.cc
/branches/3.10/src/arm/full-codegen-arm.cc
/branches/3.10/src/arm/lithium-codegen-arm.cc
/branches/3.10/src/version.cc
/branches/3.10/test/cctest/test-disasm-arm.cc
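All of the hunks below apply one pattern: wrap any instruction sequence whose
pc-relative layout matters in an Assembler::BlockConstPoolScope, so the
assembler cannot dump a literal (constant) pool in the middle of it. As a
minimal sketch of the pattern, paraphrasing the first hunk in
code-stubs-arm.cc (the wrapper function EmitReturnSequence is hypothetical,
added here only for illustration):

  void EmitReturnSequence(MacroAssembler* masm) {
    // While this scope is alive the assembler defers any pending literal
    // pool, so the three instructions below stay adjacent and the '+ 4'
    // offset computed from pc really is the address after the jump
    // (reading pc yields the current instruction's address + 8 on ARM).
    Assembler::BlockConstPoolScope block_const_pool(masm);
    masm->add(lr, pc, Operand(4));
    masm->str(lr, MemOperand(sp, 0));
    masm->Jump(r5);
  }

Without the scope, a pool emitted between these instructions would leave lr
pointing into pool data instead of at the return address.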
=======================================
--- /branches/3.10/src/arm/code-stubs-arm.cc Mon Apr 23 05:57:22 2012
+++ /branches/3.10/src/arm/code-stubs-arm.cc Thu Jun 14 02:20:45 2012
@@ -3737,9 +3737,13 @@
// Compute the return address in lr to return to after the jump below. Pc is
// already at '+ 8' from the current instruction but return is after three
// instructions so add another 4 to pc to get the return address.
- masm->add(lr, pc, Operand(4));
- __ str(lr, MemOperand(sp, 0));
- masm->Jump(r5);
+ {
+ // Prevent literal pool emission before return address.
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+ masm->add(lr, pc, Operand(4));
+ __ str(lr, MemOperand(sp, 0));
+ masm->Jump(r5);
+ }
if (always_allocate) {
// It's okay to clobber r2 and r3 here. Don't mess with r0 and r1
@@ -3956,14 +3960,21 @@
// Jump to a faked try block that does the invoke, with a faked catch
// block that sets the pending exception.
__ jmp(&invoke);
- __ bind(&handler_entry);
- handler_offset_ = handler_entry.pos();
- // Caught exception: Store result (exception) in the pending exception
- // field in the JSEnv and return a failure sentinel. Coming in here the
- // fp will be invalid because the PushTryHandler below sets it to 0 to
- // signal the existence of the JSEntry frame.
- __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+
+ // Block literal pool emission whilst taking the position of the handler
+ // entry. This avoids making the assumption that literal pools are always
+ // emitted after an instruction is emitted, rather than before.
+ {
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+ __ bind(&handler_entry);
+ handler_offset_ = handler_entry.pos();
+ // Caught exception: Store result (exception) in the pending exception
+ // field in the JSEnv and return a failure sentinel. Coming in here the
+ // fp will be invalid because the PushTryHandler below sets it to 0 to
+ // signal the existence of the JSEntry frame.
+ __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
+ isolate)));
+ }
__ str(r0, MemOperand(ip));
__ mov(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
__ b(&exit);
@@ -4006,9 +4017,13 @@
// Branch and link to JSEntryTrampoline. We don't use the double underscore
// macro for the add instruction because we don't want the coverage tool
- // inserting instructions here after we read the pc.
- __ mov(lr, Operand(pc));
- masm->add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
+ // inserting instructions here after we read the pc. We block literal pool
+ // emission for the same reason.
+ {
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+ __ mov(lr, Operand(pc));
+ masm->add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
+ }
// Unlink this frame from the handler chain.
__ PopTryHandler();
@@ -6805,6 +6820,10 @@
Register target) {
__ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
RelocInfo::CODE_TARGET));
+
+ // Prevent literal pool emission during calculation of return address.
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+
// Push return address (accessible to GC through exit frame pc).
// Note that using pc with str is deprecated.
Label start;
@@ -7165,8 +7184,13 @@
// forth between a compare instructions (a nop in this position) and the
// real branch when we start and stop incremental heap marking.
// See RecordWriteStub::Patch for details.
- __ b(&skip_to_incremental_noncompacting);
- __ b(&skip_to_incremental_compacting);
+ {
+ // Block literal pool emission, as the position of these two instructions
+ // is assumed by the patching code.
+ Assembler::BlockConstPoolScope block_const_pool(masm);
+ __ b(&skip_to_incremental_noncompacting);
+ __ b(&skip_to_incremental_compacting);
+ }
if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
__ RememberedSetHelper(object_,
=======================================
--- /branches/3.10/src/arm/full-codegen-arm.cc Mon May 21 06:42:45 2012
+++ /branches/3.10/src/arm/full-codegen-arm.cc Thu Jun 14 02:20:45 2012
@@ -73,9 +73,6 @@
Assembler::BlockConstPoolScope block_const_pool(masm_);
__ bind(&patch_site_);
__ cmp(reg, Operand(reg));
- // Don't use b(al, ...) as that might emit the constant pool right after the
- // branch. After patching when the branch is no longer unconditional
- // execution can continue into the constant pool.
__ b(eq, target); // Always taken before patched.
}
@@ -90,6 +87,8 @@
}
void EmitPatchInfo() {
+ // Block literal pool emission whilst recording patch site information.
+ Assembler::BlockConstPoolScope block_const_pool(masm_);
if (patch_site_.is_bound()) {
int delta_to_patch_site =
masm_->InstructionsGeneratedSince(&patch_site_);
Register reg;
@@ -344,6 +343,8 @@
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
Label* back_edge_target) {
Comment cmnt(masm_, "[ Stack check");
+ // Block literal pools whilst emitting stack check code.
+ Assembler::BlockConstPoolScope block_const_pool(masm_);
Label ok;
if (FLAG_count_based_interrupts) {
=======================================
--- /branches/3.10/src/arm/lithium-codegen-arm.cc Thu May 3 02:06:43 2012
+++ /branches/3.10/src/arm/lithium-codegen-arm.cc Thu Jun 14 02:20:45 2012
@@ -571,6 +571,9 @@
LInstruction* instr,
SafepointMode safepoint_mode) {
ASSERT(instr != NULL);
+ // Block literal pool emission to ensure nop indicating no inlined smi code
+ // is in the correct position.
+ Assembler::BlockConstPoolScope block_const_pool(masm());
LPointerMap* pointers = instr->pointer_map();
RecordPosition(pointers->position());
__ Call(code, mode);
@@ -1684,6 +1687,9 @@
ASSERT(ToRegister(instr->result()).is(r0));
BinaryOpStub stub(instr->op(), NO_OVERWRITE);
+ // Block literal pool emission to ensure nop indicating no inlined smi code
+ // is in the correct position.
+ Assembler::BlockConstPoolScope block_const_pool(masm());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ nop(); // Signals no inlined code.
}
@@ -2315,20 +2321,25 @@
Label cache_miss;
Register map = temp;
__ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset));
- __ bind(deferred->map_check()); // Label for calculating code patching.
- // We use Factory::the_hole_value() on purpose instead of loading from the
- // root array to force relocation to be able to later patch with
- // the cached map.
- Handle<JSGlobalPropertyCell> cell =
- factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
- __ mov(ip, Operand(Handle<Object>(cell)));
- __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
- __ cmp(map, Operand(ip));
- __ b(ne, &cache_miss);
- // We use Factory::the_hole_value() on purpose instead of loading from the
- // root array to force relocation to be able to later patch
- // with true or false.
- __ mov(result, Operand(factory()->the_hole_value()));
+ {
+ // Block constant pool emission to ensure the positions of instructions are
+ // as expected by the patcher. See InstanceofStub::Generate().
+ Assembler::BlockConstPoolScope block_const_pool(masm());
+ __ bind(deferred->map_check()); // Label for calculating code patching.
+ // We use Factory::the_hole_value() on purpose instead of loading from the
+ // root array to force relocation to be able to later patch with
+ // the cached map.
+ Handle<JSGlobalPropertyCell> cell =
+ factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+ __ mov(ip, Operand(Handle<Object>(cell)));
+ __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
+ __ cmp(map, Operand(ip));
+ __ b(ne, &cache_miss);
+ // We use Factory::the_hole_value() on purpose instead of loading from the
+ // root array to force relocation to be able to later patch
+ // with true or false.
+ __ mov(result, Operand(factory()->the_hole_value()));
+ }
__ b(&done);
// The inlined call site cache did not match. Check null and string before
@@ -5105,6 +5116,8 @@
int current_pc = masm()->pc_offset();
int patch_size = Deoptimizer::patch_size();
if (current_pc < last_lazy_deopt_pc_ + patch_size) {
+ // Block literal pool emission for duration of padding.
+ Assembler::BlockConstPoolScope block_const_pool(masm());
int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc;
ASSERT_EQ(0, padding_size % Assembler::kInstrSize);
while (padding_size > 0) {
=======================================
--- /branches/3.10/src/version.cc Thu Jun 14 00:48:42 2012
+++ /branches/3.10/src/version.cc Thu Jun 14 02:20:45 2012
@@ -35,7 +35,7 @@
#define MAJOR_VERSION 3
#define MINOR_VERSION 10
#define BUILD_NUMBER 8
-#define PATCH_LEVEL 17
+#define PATCH_LEVEL 18
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0
=======================================
--- /branches/3.10/test/cctest/test-disasm-arm.cc Mon Jan 16 03:42:08 2012
+++ /branches/3.10/test/cctest/test-disasm-arm.cc Thu Jun 14 02:20:45 2012
@@ -92,6 +92,10 @@
if (!DisassembleAndCompare(progcounter, compare_string)) failure = true; \
}
+// Force emission of any pending literals into a pool.
+#define EMIT_PENDING_LITERALS() \
+ assm.CheckConstPool(true, false)
+
// Verify that all invocations of the COMPARE macro passed successfully.
// Exit with a failure if at least one of the tests failed.
@@ -280,6 +284,10 @@
// is pretty strange anyway.
COMPARE(mov(r5, Operand(0x01234), SetCC, ne),
"159fc000 ldrne ip, [pc, #+0]");
+ // Emit a literal pool now, otherwise this could be dumped later, in the
+ // middle of a different test.
+ EMIT_PENDING_LITERALS();
+
// We only disassemble one instruction so the eor instruction is not here.
// The eor does the setcc so we get a movw here.
COMPARE(eor(r5, r4, Operand(0x1234), SetCC, ne),