Reviewers: yurys,
Message:
This just simplifies the current implementation, it does not yet alleviate
the
problem of recompiling all code that uses MacroAssembler::Allocate macros.
The number of calls into the runtime should be the same, just the allocation
itself is now also done in the runtime. This addresses several issues I see
with
the previous implementation:
- If inlined allocation fails, two events have been emitted.
- In optimized code we now correctly use deferred code.
- All existing code-paths are already thoroughly tested.
This CL should not change tracking semantics, let me know if I missed
something.
Description:
Simplify current inline allocation tracking mechanism.
[email protected]
Please review this at https://codereview.chromium.org/65043006/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
Affected files (+11, -82 lines):
M src/assembler.h
M src/assembler.cc
M src/heap-profiler.h
M src/heap-profiler.cc
M src/heap.cc
M src/serialize.cc
M src/x64/macro-assembler-x64.h
M src/x64/macro-assembler-x64.cc
Index: src/assembler.cc
diff --git a/src/assembler.cc b/src/assembler.cc
index
c06e15b75f32b294e607322a0445d06921b1afd8..fa0b4c375b902f689aeb3944206ecc6536f3779d
100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -1335,14 +1335,6 @@ ExternalReference
ExternalReference::address_of_the_hole_nan() {
}
-ExternalReference ExternalReference::record_object_allocation_function(
- Isolate* isolate) {
- return ExternalReference(
- Redirect(isolate,
-
FUNCTION_ADDR(HeapProfiler::RecordObjectAllocationFromMasm)));
-}
-
-
ExternalReference ExternalReference::address_of_uint32_bias() {
return ExternalReference(
reinterpret_cast<void*>(&double_constants.uint32_bias));
Index: src/assembler.h
diff --git a/src/assembler.h b/src/assembler.h
index
8ec3ad7427692469bda8eb59fc862e40f9446601..887365808bd3b46d19fe1972b7a4666c1c68dc6c
100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -725,9 +725,6 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference get_make_code_young_function(Isolate* isolate);
static ExternalReference get_mark_code_as_executed_function(Isolate*
isolate);
- // New heap objects tracking support.
- static ExternalReference record_object_allocation_function(Isolate*
isolate);
-
// Deoptimization support.
static ExternalReference new_deoptimizer_function(Isolate* isolate);
static ExternalReference compute_output_frames_function(Isolate*
isolate);
Index: src/heap-profiler.cc
diff --git a/src/heap-profiler.cc b/src/heap-profiler.cc
index
6b159a98a338c7b2704ed924a59bc401f8425663..c0016fbd2b9f3a3ecc30c7eef328c3f8af46be7a
100644
--- a/src/heap-profiler.cc
+++ b/src/heap-profiler.cc
@@ -171,13 +171,6 @@ void HeapProfiler::StopHeapAllocationsRecording() {
}
-void HeapProfiler::RecordObjectAllocationFromMasm(Isolate* isolate,
- Address obj,
- int size) {
- isolate->heap_profiler()->NewObjectEvent(obj, size);
-}
-
-
void HeapProfiler::DropCompiledCode() {
Isolate* isolate = heap()->isolate();
HandleScope scope(isolate);
Index: src/heap-profiler.h
diff --git a/src/heap-profiler.h b/src/heap-profiler.h
index
74002278d40beb1a23a06523da4d5c6066911c9f..06345fe98aa3d2baacd053939aa9f10c0940dec8
100644
--- a/src/heap-profiler.h
+++ b/src/heap-profiler.h
@@ -56,10 +56,6 @@ class HeapProfiler {
void StartHeapObjectsTracking();
void StopHeapObjectsTracking();
- static void RecordObjectAllocationFromMasm(Isolate* isolate,
- Address obj,
- int size);
-
SnapshotObjectId PushHeapObjectsStats(OutputStream* stream);
int GetSnapshotsCount();
HeapSnapshot* GetSnapshot(int index);
Index: src/heap.cc
diff --git a/src/heap.cc b/src/heap.cc
index
8eb8ae24b2b01ca992b2182b93c8196eea117e6d..2ba39198e1489427b56b57193ea04556695882e0
100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -840,9 +840,7 @@ static bool AbortIncrementalMarkingAndCollectGarbage(
}
-void Heap::ReserveSpace(
- int *sizes,
- Address *locations_out) {
+void Heap::ReserveSpace(int *sizes, Address *locations_out) {
bool gc_performed = true;
int counter = 0;
static const int kThreshold = 20;
Index: src/serialize.cc
diff --git a/src/serialize.cc b/src/serialize.cc
index
89d6b14fceeb2a67c56ac9afb283831501b52026..26611e7a3e86ef70a38531364a814d0fe2e87c20
100644
--- a/src/serialize.cc
+++ b/src/serialize.cc
@@ -568,17 +568,13 @@ void ExternalReferenceTable::PopulateTable(Isolate*
isolate) {
UNCLASSIFIED,
61,
"Heap::allocation_sites_list_address()");
-
Add(ExternalReference::record_object_allocation_function(isolate).address(),
- UNCLASSIFIED,
- 62,
- "HeapProfiler::RecordObjectAllocationFromMasm");
Add(ExternalReference::address_of_uint32_bias().address(),
UNCLASSIFIED,
- 63,
+ 62,
"uint32_bias");
Add(ExternalReference::get_mark_code_as_executed_function(isolate).address(),
UNCLASSIFIED,
- 64,
+ 63,
"Code::MarkCodeAsExecuted");
// Add a small set of deopt entry addresses to encoder without
generating the
Index: src/x64/macro-assembler-x64.cc
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index
9e8568c2de5b260edfd23c53c33f599a2792b857..df984fcaf075e67e964b4e1cdd0c68a135d7dcb9
100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -4081,7 +4081,10 @@ void MacroAssembler::Allocate(int object_size,
AllocationFlags flags) {
ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
- if (!FLAG_inline_new) {
+ if (!FLAG_inline_new ||
+ // TODO(mstarzinger): Implement more efficiently by keeping the
+ // bump-pointer allocation area empty instead of recompiling code.
+ isolate()->heap_profiler()->is_tracking_allocations()) {
if (emit_debug_code()) {
// Trash the registers to simulate an allocation failure.
movl(result, Immediate(0x7091));
@@ -4100,10 +4103,6 @@ void MacroAssembler::Allocate(int object_size,
// Load address of new object into result.
LoadAllocationTopHelper(result, scratch, flags);
- if (isolate()->heap_profiler()->is_tracking_allocations()) {
- RecordObjectAllocation(isolate(), result, object_size);
- }
-
// Align the next allocation. Storing the filler map without checking
top is
// safe in new-space because the limit of the heap is aligned there.
if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
@@ -4165,7 +4164,10 @@ void MacroAssembler::Allocate(Register object_size,
Label* gc_required,
AllocationFlags flags) {
ASSERT((flags & SIZE_IN_WORDS) == 0);
- if (!FLAG_inline_new) {
+ if (!FLAG_inline_new ||
+ // TODO(mstarzinger): Implement more efficiently by keeping the
+ // bump-pointer allocation area empty instead of recompiling code.
+ isolate()->heap_profiler()->is_tracking_allocations()) {
if (emit_debug_code()) {
// Trash the registers to simulate an allocation failure.
movl(result, Immediate(0x7091));
@@ -4183,10 +4185,6 @@ void MacroAssembler::Allocate(Register object_size,
// Load address of new object into result.
LoadAllocationTopHelper(result, scratch, flags);
- if (isolate()->heap_profiler()->is_tracking_allocations()) {
- RecordObjectAllocation(isolate(), result, object_size);
- }
-
// Align the next allocation. Storing the filler map without checking
top is
// safe in new-space because the limit of the heap is aligned there.
if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) {
@@ -4947,38 +4945,6 @@ void MacroAssembler::TestJSArrayForAllocationMemento(
}
-void MacroAssembler::RecordObjectAllocation(Isolate* isolate,
- Register object,
- Register object_size) {
- FrameScope frame(this, StackFrame::EXIT);
- PushSafepointRegisters();
- PrepareCallCFunction(3);
- // In case object is rdx
- movq(kScratchRegister, object);
- movq(arg_reg_3, object_size);
- movq(arg_reg_2, kScratchRegister);
- movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE);
- CallCFunction(
- ExternalReference::record_object_allocation_function(isolate), 3);
- PopSafepointRegisters();
-}
-
-
-void MacroAssembler::RecordObjectAllocation(Isolate* isolate,
- Register object,
- int object_size) {
- FrameScope frame(this, StackFrame::EXIT);
- PushSafepointRegisters();
- PrepareCallCFunction(3);
- movq(arg_reg_2, object);
- movq(arg_reg_3, Immediate(object_size));
- movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE);
- CallCFunction(
- ExternalReference::record_object_allocation_function(isolate), 3);
- PopSafepointRegisters();
-}
-
-
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
Register object,
Register scratch0,
Index: src/x64/macro-assembler-x64.h
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index
2f7bdd26b8a70a570d48d74b3435927c66cb5049..4bea4612d5fdb030b5ea198bbdff50997d90edca
100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -1116,15 +1116,6 @@ class MacroAssembler: public Assembler {
Label* gc_required,
AllocationFlags flags);
- // Record a JS object allocation if allocations tracking mode is on.
- void RecordObjectAllocation(Isolate* isolate,
- Register object,
- Register object_size);
-
- void RecordObjectAllocation(Isolate* isolate,
- Register object,
- int object_size);
-
// Undo allocation in new space. The object passed and objects allocated
after
// it will no longer be allocated. Make sure that no pointers are left
to the
// object(s) no longer allocated as they would be invalid when
allocation is
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.