Revision: 15854
Author: [email protected]
Date: Wed Jul 24 04:12:17 2013
Log: Factor out common code from platform-specific deoptimization. Fix
Deoptimizer not to need to partition functions, but revoke their code
before patching, allowing deoptimizing_code_list to be removed from Code;
Add DeoptimizeCodeList API to deoptimizer, which works on a ZoneList<Code*>.
BUG=
[email protected], [email protected]
Review URL: https://codereview.chromium.org/19638014
http://code.google.com/p/v8/source/detail?r=15854
Modified:
/branches/bleeding_edge/src/arm/deoptimizer-arm.cc
/branches/bleeding_edge/src/deoptimizer.cc
/branches/bleeding_edge/src/deoptimizer.h
/branches/bleeding_edge/src/ia32/deoptimizer-ia32.cc
/branches/bleeding_edge/src/mark-compact.cc
/branches/bleeding_edge/src/mark-compact.h
/branches/bleeding_edge/src/mips/deoptimizer-mips.cc
/branches/bleeding_edge/src/objects-inl.h
/branches/bleeding_edge/src/objects.cc
/branches/bleeding_edge/src/objects.h
/branches/bleeding_edge/src/x64/deoptimizer-x64.cc
=======================================
--- /branches/bleeding_edge/src/arm/deoptimizer-arm.cc Tue Jul 23 06:46:10
2013
+++ /branches/bleeding_edge/src/arm/deoptimizer-arm.cc Wed Jul 24 04:12:17
2013
@@ -44,22 +44,8 @@
}
-void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
- JSFunction* function) {
- Isolate* isolate = function->GetIsolate();
- HandleScope scope(isolate);
- DisallowHeapAllocation no_allocation;
-
- ASSERT(function->IsOptimized());
- ASSERT(function->FunctionsInFunctionListShareSameCode());
-
- // Get the optimized code.
- Code* code = function->code();
+void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code)
{
Address code_start_address = code->instruction_start();
-
- // The optimized code is going to be patched, so we cannot use it any
more.
- function->shared()->EvictFromOptimizedCodeMap(code, "deoptimized
function");
-
// Invalidate the relocation information, as it will become invalid by
the
// code patching below, and is not needed any more.
code->InvalidateRelocation();
@@ -92,25 +78,6 @@
prev_call_address = call_address;
#endif
}
-
- // Add the deoptimizing code to the list.
- DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
- DeoptimizerData* data = isolate->deoptimizer_data();
- node->set_next(data->deoptimizing_code_list_);
- data->deoptimizing_code_list_ = node;
-
- // We might be in the middle of incremental marking with compaction.
- // Tell collector to treat this code object in a special way and
- // ignore all slots that might have been recorded on it.
- isolate->heap()->mark_compact_collector()->InvalidateCode(code);
-
- ReplaceCodeForRelatedFunctions(function, code);
-
- if (FLAG_trace_deopt) {
- PrintF("[forced deoptimization: ");
- function->PrintName();
- PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
- }
}
=======================================
--- /branches/bleeding_edge/src/deoptimizer.cc Tue Jul 23 06:46:10 2013
+++ /branches/bleeding_edge/src/deoptimizer.cc Wed Jul 24 04:12:17 2013
@@ -331,34 +331,47 @@
// Removes the functions selected by the given filter from the optimized
-// function list of the given context and partitions the removed functions
-// into one or more lists such that all functions in a list share the same
-// code. The head of each list is written in the deoptimizing_functions
field
-// of the corresponding code object.
-// The found code objects are returned in the given zone list.
-static void PartitionOptimizedFunctions(Context* context,
- OptimizedFunctionFilter* filter,
- ZoneList<Code*>* partitions,
- Zone* zone,
- Object* undefined) {
+// function list of the given context and adds their code to the list of
+// code objects to be deoptimized.
+static void SelectCodeToDeoptimize(Context* context,
+ OptimizedFunctionFilter* filter,
+ ZoneList<Code*>* codes,
+ Zone* zone,
+ Object* undefined) {
DisallowHeapAllocation no_allocation;
Object* current = context->get(Context::OPTIMIZED_FUNCTIONS_LIST);
Object* remainder_head = undefined;
Object* remainder_tail = undefined;
- ASSERT_EQ(0, partitions->length());
+
+ // TODO(titzer): rewrite to not modify unselected functions.
while (current != undefined) {
JSFunction* function = JSFunction::cast(current);
current = function->next_function_link();
if (filter->TakeFunction(function)) {
+ // Extract this function from the context's list and remember the
code.
Code* code = function->code();
- if (code->deoptimizing_functions() == undefined) {
- partitions->Add(code, zone);
+ ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
+ if (code->marked_for_deoptimization()) {
+ ASSERT(codes->Contains(code));
} else {
- ASSERT(partitions->Contains(code));
+ code->set_marked_for_deoptimization(true);
+ codes->Add(code, zone);
+ }
+ SharedFunctionInfo* shared = function->shared();
+ // Replace the function's code with the shared code.
+ function->set_code(shared->code());
+ // Evict the code from the optimized code map.
+ shared->EvictFromOptimizedCodeMap(code, "deoptimized function");
+ // Remove the function from the optimized functions list.
+ function->set_next_function_link(undefined);
+
+ if (FLAG_trace_deopt) {
+ PrintF("[forced deoptimization: ");
+ function->PrintName();
+ PrintF(" / %" V8PRIxPTR "]\n",
reinterpret_cast<intptr_t>(function));
}
- function->set_next_function_link(code->deoptimizing_functions());
- code->set_deoptimizing_functions(function);
} else {
+ // Don't select this function; link it back into the list.
if (remainder_head == undefined) {
remainder_head = function;
} else {
@@ -393,6 +406,14 @@
};
+class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter {
+ public:
+ virtual bool TakeFunction(JSFunction* function) {
+ return function->code()->marked_for_deoptimization();
+ }
+};
+
+
void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
DisallowHeapAllocation no_allocation;
@@ -421,19 +442,11 @@
void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
- if (!function->IsOptimized()) return;
Code* code = function->code();
- Context* context = function->context()->native_context();
- Isolate* isolate = context->GetIsolate();
- Object* undefined = isolate->heap()->undefined_value();
- Zone zone(isolate);
- ZoneList<Code*> codes(1, &zone);
+ if (code->kind() != Code::OPTIMIZED_FUNCTION) return;
DeoptimizeWithMatchingCodeFilter filter(code);
- PartitionOptimizedFunctions(context, &filter, &codes, &zone, undefined);
- ASSERT_EQ(1, codes.length());
- DeoptimizeFunctionWithPreparedFunctionList(
- JSFunction::cast(codes.at(0)->deoptimizing_functions()));
- codes.at(0)->set_deoptimizing_functions(undefined);
+ DeoptimizeAllFunctionsForContext(
+ function->context()->native_context(), &filter);
}
@@ -443,12 +456,10 @@
Isolate* isolate = context->GetIsolate();
Object* undefined = isolate->heap()->undefined_value();
Zone zone(isolate);
- ZoneList<Code*> codes(1, &zone);
- PartitionOptimizedFunctions(context, filter, &codes, &zone, undefined);
- for (int i = 0; i < codes.length(); ++i) {
- DeoptimizeFunctionWithPreparedFunctionList(
- JSFunction::cast(codes.at(i)->deoptimizing_functions()));
- codes.at(i)->set_deoptimizing_functions(undefined);
+ ZoneList<Code*> codes(4, &zone);
+ SelectCodeToDeoptimize(context, filter, &codes, &zone, undefined);
+ for (int i = 0; i < codes.length(); i++) {
+ DeoptimizeCode(isolate, codes.at(i));
}
}
@@ -466,6 +477,55 @@
}
+void Deoptimizer::DeoptimizeCodeList(Isolate* isolate, ZoneList<Code*>*
codes) {
+ if (codes->length() == 0) return; // Nothing to do.
+
+ // Mark the code; any functions referring to this code will be selected.
+ for (int i = 0; i < codes->length(); i++) {
+ ASSERT(!codes->at(i)->marked_for_deoptimization());
+ codes->at(i)->set_marked_for_deoptimization(true);
+ }
+
+ // For all contexts, remove optimized functions that refer to the
selected
+ // code from the optimized function lists.
+ Object* undefined = isolate->heap()->undefined_value();
+ Zone zone(isolate);
+ Object* list = isolate->heap()->native_contexts_list();
+ DeoptimizeMarkedCodeFilter filter;
+ while (!list->IsUndefined()) {
+ Context* context = Context::cast(list);
+ // Note that selecting code unlinks the functions that refer to it.
+ SelectCodeToDeoptimize(context, &filter, codes, &zone, undefined);
+ list = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
+ }
+
+ // Now deoptimize all the code.
+ for (int i = 0; i < codes->length(); i++) {
+ DeoptimizeCode(isolate, codes->at(i));
+ }
+}
+
+
+void Deoptimizer::DeoptimizeCode(Isolate* isolate, Code* code) {
+ HandleScope scope(isolate);
+ DisallowHeapAllocation nha;
+
+ // Do platform-specific patching of the optimized code.
+ PatchCodeForDeoptimization(isolate, code);
+
+ // Add the deoptimizing code to the list.
+ DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
+ DeoptimizerData* data = isolate->deoptimizer_data();
+ node->set_next(data->deoptimizing_code_list_);
+ data->deoptimizing_code_list_ = node;
+
+ // We might be in the middle of incremental marking with compaction.
+ // Tell collector to treat this code object in a special way and
+ // ignore all slots that might have been recorded on it.
+ isolate->heap()->mark_compact_collector()->InvalidateCode(code);
+}
+
+
void Deoptimizer::HandleWeakDeoptimizedCode(v8::Isolate* isolate,
v8::Persistent<v8::Value>* obj,
void* parameter) {
@@ -2569,21 +2629,6 @@
data->deopt_entry_code_entries_[type] = entry_count;
}
-
-
-void Deoptimizer::ReplaceCodeForRelatedFunctions(JSFunction* function,
- Code* code) {
- SharedFunctionInfo* shared = function->shared();
- Object* undefined = function->GetHeap()->undefined_value();
- Object* current = function;
-
- while (current != undefined) {
- JSFunction* func = JSFunction::cast(current);
- current = func->next_function_link();
- func->set_code(shared->code());
- func->set_next_function_link(undefined);
- }
-}
FrameDescription::FrameDescription(uint32_t frame_size,
=======================================
--- /branches/bleeding_edge/src/deoptimizer.h Tue Jul 23 06:46:10 2013
+++ /branches/bleeding_edge/src/deoptimizer.h Wed Jul 24 04:12:17 2013
@@ -197,6 +197,8 @@
static void DeoptimizeAllFunctionsWith(Isolate* isolate,
OptimizedFunctionFilter* filter);
+ static void DeoptimizeCodeList(Isolate* isolate, ZoneList<Code*>* codes);
+
static void DeoptimizeAllFunctionsForContext(
Context* context, OptimizedFunctionFilter* filter);
@@ -411,9 +413,11 @@
v8::Persistent<v8::Value>* obj,
void* data);
- // Deoptimize function assuming that function->next_function_link()
points
- // to a list that contains all functions that share the same optimized
code.
- static void DeoptimizeFunctionWithPreparedFunctionList(JSFunction*
function);
+ // Deoptimize the given code and add to appropriate deoptimization lists.
+ static void DeoptimizeCode(Isolate* isolate, Code* code);
+
+ // Patch the given code so that it will deoptimize itself.
+ static void PatchCodeForDeoptimization(Isolate* isolate, Code* code);
// Fill the input from from a JavaScript frame. This is used when
// the debugger needs to inspect an optimized frame. For normal
=======================================
--- /branches/bleeding_edge/src/ia32/deoptimizer-ia32.cc Tue Jul 23
06:46:10 2013
+++ /branches/bleeding_edge/src/ia32/deoptimizer-ia32.cc Wed Jul 24
04:12:17 2013
@@ -114,22 +114,8 @@
}
-void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
- JSFunction* function) {
- Isolate* isolate = function->GetIsolate();
- HandleScope scope(isolate);
- DisallowHeapAllocation nha;
-
- ASSERT(function->IsOptimized());
- ASSERT(function->FunctionsInFunctionListShareSameCode());
-
- // Get the optimized code.
- Code* code = function->code();
+void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code)
{
Address code_start_address = code->instruction_start();
-
- // The optimized code is going to be patched, so we cannot use it any
more.
- function->shared()->EvictFromOptimizedCodeMap(code, "deoptimized
function");
-
// We will overwrite the code's relocation info in-place. Relocation info
// is written backward. The relocation info is the payload of a byte
// array. Later on we will slide this to the start of the byte array and
@@ -188,25 +174,6 @@
ASSERT(junk_address <= reloc_end_address);
isolate->heap()->CreateFillerObjectAt(junk_address,
reloc_end_address - junk_address);
-
- // Add the deoptimizing code to the list.
- DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
- DeoptimizerData* data = isolate->deoptimizer_data();
- node->set_next(data->deoptimizing_code_list_);
- data->deoptimizing_code_list_ = node;
-
- // We might be in the middle of incremental marking with compaction.
- // Tell collector to treat this code object in a special way and
- // ignore all slots that might have been recorded on it.
- isolate->heap()->mark_compact_collector()->InvalidateCode(code);
-
- ReplaceCodeForRelatedFunctions(function, code);
-
- if (FLAG_trace_deopt) {
- PrintF("[forced deoptimization: ");
- function->PrintName();
- PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
- }
}
=======================================
--- /branches/bleeding_edge/src/mark-compact.cc Wed Jul 24 02:19:55 2013
+++ /branches/bleeding_edge/src/mark-compact.cc Wed Jul 24 04:12:17 2013
@@ -73,8 +73,8 @@
migration_slots_buffer_(NULL),
heap_(NULL),
code_flusher_(NULL),
- encountered_weak_collections_(NULL) { }
-
+ encountered_weak_collections_(NULL),
+ code_to_deoptimize_(NULL) { }
#ifdef VERIFY_HEAP
class VerifyMarkingVisitor: public ObjectVisitor {
@@ -492,7 +492,7 @@
obj = code_iterator.Next()) {
Code* code = Code::cast(obj);
if (code->kind() != Code::OPTIMIZED_FUNCTION) continue;
- if (code->marked_for_deoptimization()) continue;
+ if (WillBeDeoptimized(code)) continue;
code->VerifyEmbeddedMapsDependency();
}
}
@@ -945,14 +945,6 @@
}
-class DeoptimizeMarkedCodeFilter : public OptimizedFunctionFilter {
- public:
- virtual bool TakeFunction(JSFunction* function) {
- return function->code()->marked_for_deoptimization();
- }
-};
-
-
void MarkCompactCollector::Finish() {
#ifdef DEBUG
ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
@@ -964,8 +956,23 @@
// objects (empty string, illegal builtin).
isolate()->stub_cache()->Clear();
- DeoptimizeMarkedCodeFilter filter;
- Deoptimizer::DeoptimizeAllFunctionsWith(isolate(), &filter);
+ if (code_to_deoptimize_ != Smi::FromInt(0)) {
+ // Convert the linked list of Code objects into a ZoneList.
+ Zone zone(isolate());
+ ZoneList<Code*> codes(4, &zone);
+
+ Object *list = code_to_deoptimize_;
+ while (list->IsCode()) {
+ Code *code = Code::cast(list);
+ list = code->code_to_deoptimize_link();
+ codes.Add(code, &zone);
+ // Destroy the link and don't ever try to deoptimize this code again.
+ code->set_code_to_deoptimize_link(Smi::FromInt(0));
+ }
+ code_to_deoptimize_ = Smi::FromInt(0);
+
+ Deoptimizer::DeoptimizeCodeList(isolate(), &codes);
+ }
}
@@ -2610,8 +2617,17 @@
// and ClearAndDeoptimizeDependentCode shouldn't be called.
ASSERT(entries->is_code_at(i));
Code* code = entries->code_at(i);
- if (IsMarked(code) && !code->marked_for_deoptimization()) {
- code->set_marked_for_deoptimization(true);
+
+ if (IsMarked(code) && !WillBeDeoptimized(code)) {
+ // Insert the code into the code_to_deoptimize linked list.
+ Object* next = code_to_deoptimize_;
+ if (next != Smi::FromInt(0)) {
+ // Record the slot so that it is updated.
+ Object** slot = code->code_to_deoptimize_link_slot();
+ RecordSlot(slot, slot, next);
+ }
+ code->set_code_to_deoptimize_link(next);
+ code_to_deoptimize_ = code;
}
entries->clear_at(i);
}
@@ -2632,7 +2648,7 @@
Object* obj = entries->object_at(i);
ASSERT(obj->IsCode() || IsMarked(obj));
if (IsMarked(obj) &&
- (!obj->IsCode() || !Code::cast(obj)->marked_for_deoptimization())) {
+ (!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) {
if (new_number_of_entries + group_number_of_entries != i) {
entries->set_object_at(
new_number_of_entries + group_number_of_entries, obj);
@@ -3271,6 +3287,16 @@
invalidated_code_.Add(code);
}
}
+
+
+// Return true if the given code is deoptimized or will be deoptimized.
+bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
+ // We assume the code_to_deoptimize_link is initialized to undefined.
+ // If it is 0, or refers to another Code object, then this code
+ // is already linked, or was already linked into the list.
+ return code->code_to_deoptimize_link() != heap()->undefined_value()
+ || code->marked_for_deoptimization();
+}
bool MarkCompactCollector::MarkInvalidatedCode() {
@@ -3456,8 +3482,9 @@
}
}
- // Update pointer from the native contexts list.
+ // Update the heads of the native contexts list and the code to deoptimize list.
updating_visitor.VisitPointer(heap_->native_contexts_list_address());
+ updating_visitor.VisitPointer(&code_to_deoptimize_);
heap_->string_table()->Iterate(&updating_visitor);
=======================================
--- /branches/bleeding_edge/src/mark-compact.h Mon Jul 22 01:32:24 2013
+++ /branches/bleeding_edge/src/mark-compact.h Wed Jul 24 04:12:17 2013
@@ -743,6 +743,7 @@
~MarkCompactCollector();
bool MarkInvalidatedCode();
+ bool WillBeDeoptimized(Code* code);
void RemoveDeadInvalidatedCode();
void ProcessInvalidatedCode(ObjectVisitor* visitor);
@@ -946,6 +947,7 @@
MarkingDeque marking_deque_;
CodeFlusher* code_flusher_;
Object* encountered_weak_collections_;
+ Object* code_to_deoptimize_;
List<Page*> evacuation_candidates_;
List<Code*> invalidated_code_;
=======================================
--- /branches/bleeding_edge/src/mips/deoptimizer-mips.cc Tue Jul 23
06:46:10 2013
+++ /branches/bleeding_edge/src/mips/deoptimizer-mips.cc Wed Jul 24
04:12:17 2013
@@ -43,22 +43,8 @@
}
-void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
- JSFunction* function) {
- Isolate* isolate = function->GetIsolate();
- HandleScope scope(isolate);
- DisallowHeapAllocation nha;
-
- ASSERT(function->IsOptimized());
- ASSERT(function->FunctionsInFunctionListShareSameCode());
-
- // Get the optimized code.
- Code* code = function->code();
+void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code)
{
Address code_start_address = code->instruction_start();
-
- // The optimized code is going to be patched, so we cannot use it any
more.
- function->shared()->EvictFromOptimizedCodeMap(code, "deoptimized
function");
-
// Invalidate the relocation information, as it will become invalid by
the
// code patching below, and is not needed any more.
code->InvalidateRelocation();
@@ -89,30 +75,6 @@
prev_call_address = call_address;
#endif
}
-
- // Add the deoptimizing code to the list.
- DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
- DeoptimizerData* data = isolate->deoptimizer_data();
- node->set_next(data->deoptimizing_code_list_);
- data->deoptimizing_code_list_ = node;
-
- // We might be in the middle of incremental marking with compaction.
- // Tell collector to treat this code object in a special way and
- // ignore all slots that might have been recorded on it.
- isolate->heap()->mark_compact_collector()->InvalidateCode(code);
-
- ReplaceCodeForRelatedFunctions(function, code);
-
- if (FLAG_trace_deopt) {
- PrintF("[forced deoptimization: ");
- function->PrintName();
- PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function));
-#ifdef DEBUG
- if (FLAG_print_code) {
- code->PrintLn();
- }
-#endif
- }
}
=======================================
--- /branches/bleeding_edge/src/objects-inl.h Tue Jul 23 02:18:42 2013
+++ /branches/bleeding_edge/src/objects-inl.h Wed Jul 24 04:12:17 2013
@@ -5239,15 +5239,22 @@
}
-void Code::set_deoptimizing_functions(Object* value) {
+Object* Code::code_to_deoptimize_link() {
+ // Optimized code should not have type feedback.
+ ASSERT(kind() == OPTIMIZED_FUNCTION);
+ return READ_FIELD(this, kTypeFeedbackInfoOffset);
+}
+
+
+void Code::set_code_to_deoptimize_link(Object* value) {
ASSERT(kind() == OPTIMIZED_FUNCTION);
WRITE_FIELD(this, kTypeFeedbackInfoOffset, value);
}
-Object* Code::deoptimizing_functions() {
+Object** Code::code_to_deoptimize_link_slot() {
ASSERT(kind() == OPTIMIZED_FUNCTION);
- return Object::cast(READ_FIELD(this, kTypeFeedbackInfoOffset));
+ return HeapObject::RawField(this, kTypeFeedbackInfoOffset);
}
=======================================
--- /branches/bleeding_edge/src/objects.cc Wed Jul 24 00:45:54 2013
+++ /branches/bleeding_edge/src/objects.cc Wed Jul 24 04:12:17 2013
@@ -11348,14 +11348,6 @@
}
-class DeoptimizeDependentCodeFilter : public OptimizedFunctionFilter {
- public:
- virtual bool TakeFunction(JSFunction* function) {
- return function->code()->marked_for_deoptimization();
- }
-};
-
-
void DependentCode::DeoptimizeDependentCodeGroup(
Isolate* isolate,
DependentCode::DependencyGroup group) {
@@ -11365,10 +11357,14 @@
int end = starts.at(group + 1);
int code_entries = starts.number_of_entries();
if (start == end) return;
+
+ // Collect all the code to deoptimize.
+ Zone zone(isolate);
+ ZoneList<Code*> codes(end - start, &zone);
for (int i = start; i < end; i++) {
if (is_code_at(i)) {
Code* code = code_at(i);
- code->set_marked_for_deoptimization(true);
+ if (!code->marked_for_deoptimization()) codes.Add(code, &zone);
} else {
CompilationInfo* info = compilation_info_at(i);
info->AbortDueToDependencyChange();
@@ -11384,8 +11380,7 @@
clear_at(i);
}
set_number_of_entries(group, 0);
- DeoptimizeDependentCodeFilter filter;
- Deoptimizer::DeoptimizeAllFunctionsWith(isolate, &filter);
+ Deoptimizer::DeoptimizeCodeList(isolate, &codes);
}
=======================================
--- /branches/bleeding_edge/src/objects.h Tue Jul 23 12:05:00 2013
+++ /branches/bleeding_edge/src/objects.h Wed Jul 24 04:12:17 2013
@@ -4561,7 +4561,7 @@
// [type_feedback_info]: Struct containing type feedback information for
// unoptimized code. Optimized code can temporarily store the head of
- // the list of the dependent optimized functions during deoptimization.
+ // the list of code to be deoptimized during mark-compact GC.
// STUBs can use this slot to store arbitrary information as a Smi.
// Will contain either a TypeFeedbackInfo object, or JSFunction object,
// or undefined, or a Smi.
@@ -4569,8 +4569,11 @@
inline void InitializeTypeFeedbackInfoNoWriteBarrier(Object* value);
inline int stub_info();
inline void set_stub_info(int info);
- inline Object* deoptimizing_functions();
- inline void set_deoptimizing_functions(Object* value);
+
+ // Used during GC to link a list of code objects to deoptimize.
+ inline Object* code_to_deoptimize_link();
+ inline void set_code_to_deoptimize_link(Object* value);
+ inline Object** code_to_deoptimize_link_slot();
// [gc_metadata]: Field used to hold GC related metadata. The contents
of this
// field does not have to be traced during garbage collection since
@@ -6762,18 +6765,6 @@
// Retrieve the native context from a function's literal array.
static Context* NativeContextFromLiterals(FixedArray* literals);
-#ifdef DEBUG
- bool FunctionsInFunctionListShareSameCode() {
- Object* current = this;
- while (!current->IsUndefined()) {
- JSFunction* function = JSFunction::cast(current);
- current = function->next_function_link();
- if (function->code() != this->code()) return false;
- }
- return true;
- }
-#endif
-
bool PassesHydrogenFilter();
// Layout descriptors. The last property (from kNonWeakFieldsEndOffset to
=======================================
--- /branches/bleeding_edge/src/x64/deoptimizer-x64.cc Tue Jul 23 06:46:10
2013
+++ /branches/bleeding_edge/src/x64/deoptimizer-x64.cc Wed Jul 24 04:12:17
2013
@@ -46,21 +46,7 @@
}
-void Deoptimizer::DeoptimizeFunctionWithPreparedFunctionList(
- JSFunction* function) {
- Isolate* isolate = function->GetIsolate();
- HandleScope scope(isolate);
- DisallowHeapAllocation nha;
-
- ASSERT(function->IsOptimized());
- ASSERT(function->FunctionsInFunctionListShareSameCode());
-
- // Get the optimized code.
- Code* code = function->code();
-
- // The optimized code is going to be patched, so we cannot use it any
more.
- function->shared()->EvictFromOptimizedCodeMap(code, "deoptimized
function");
-
+void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code)
{
// Invalidate the relocation information, as it will become invalid by
the
// code patching below, and is not needed any more.
code->InvalidateRelocation();
@@ -71,7 +57,7 @@
// before the safepoint table (space was allocated there when the Code
// object was created, if necessary).
- Address instruction_start = function->code()->instruction_start();
+ Address instruction_start = code->instruction_start();
#ifdef DEBUG
Address prev_call_address = NULL;
#endif
@@ -93,25 +79,6 @@
prev_call_address = call_address;
#endif
}
-
- // Add the deoptimizing code to the list.
- DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
- DeoptimizerData* data = isolate->deoptimizer_data();
- node->set_next(data->deoptimizing_code_list_);
- data->deoptimizing_code_list_ = node;
-
- // We might be in the middle of incremental marking with compaction.
- // Tell collector to treat this code object in a special way and
- // ignore all slots that might have been recorded on it.
- isolate->heap()->mark_compact_collector()->InvalidateCode(code);
-
- ReplaceCodeForRelatedFunctions(function, code);
-
- if (FLAG_trace_deopt) {
- PrintF("[forced deoptimization: ");
- function->PrintName();
- PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
- }
}
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.