Revision: 10222
Author: [email protected]
Date: Fri Dec 9 04:01:59 2011
Log: Merge r10215 from the bleeding_edge to the 3.7 branch.
Ensure that non-optimized code objects are not flushed for inlined
functions.
The collector was flushing them when the optimized code was reachable only
through the stack (not through the JSFunction object), which happens when
there is a pending lazy deoptimization.
Also prevent v8::Script::New from leaking internal objects allocated by the
compiler into the outer HandleScope.
[email protected]
BUG=http://crbug.com/97116
TEST=test/mjsunit/regress/regress-97116.js
Review URL: http://codereview.chromium.org/8888011
------------------------------------------------------------------------
Review URL: http://codereview.chromium.org/8888040
http://code.google.com/p/v8/source/detail?r=10222
Added:
/branches/3.7/test/mjsunit/regress/regress-97116.js
Modified:
/branches/3.7/src/api.cc
/branches/3.7/src/mark-compact.cc
/branches/3.7/src/mark-compact.h
/branches/3.7/src/objects.h
/branches/3.7/src/v8threads.h
/branches/3.7/src/version.cc
=======================================
--- /dev/null
+++ /branches/3.7/test/mjsunit/regress/regress-97116.js Fri Dec 9 04:01:59 2011
@@ -0,0 +1,50 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-gc --allow-natives-syntax
+
+// Check that we are not flushing code for inlined functions that
+// have a pending lazy deoptimization on the stack.
+
+function deopt() {
+ try { } catch (e) { } // Avoid inlining.
+ %DeoptimizeFunction(outer);
+ for (var i = 0; i < 10; i++) gc(); // Force code flushing.
+}
+
+function outer(should_deopt) {
+ inner(should_deopt);
+}
+
+function inner(should_deopt) {
+ if (should_deopt) deopt();
+}
+
+outer(false);
+outer(false);
+%OptimizeFunctionOnNextCall(outer);
+outer(true);
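(Note on running the new test: the mjsunit test harness reads the "// Flags:"
comment at the top of the file and passes --expose-gc and
--allow-natives-syntax to the shell, so gc(), %DeoptimizeFunction and
%OptimizeFunctionOnNextCall are all available when the test executes.)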
=======================================
--- /branches/3.7/src/api.cc Mon Dec 5 09:22:52 2011
+++ /branches/3.7/src/api.cc Fri Dec 9 04:01:59 2011
@@ -1462,31 +1462,35 @@
ON_BAILOUT(isolate, "v8::Script::New()", return Local<Script>());
LOG_API(isolate, "Script::New");
ENTER_V8(isolate);
- i::Handle<i::String> str = Utils::OpenHandle(*source);
- i::Handle<i::Object> name_obj;
- int line_offset = 0;
- int column_offset = 0;
- if (origin != NULL) {
- if (!origin->ResourceName().IsEmpty()) {
- name_obj = Utils::OpenHandle(*origin->ResourceName());
- }
- if (!origin->ResourceLineOffset().IsEmpty()) {
- line_offset = static_cast<int>(origin->ResourceLineOffset()->Value());
- }
- if (!origin->ResourceColumnOffset().IsEmpty()) {
- column_offset = static_cast<int>(origin->ResourceColumnOffset()->Value());
- }
- }
- EXCEPTION_PREAMBLE(isolate);
- i::ScriptDataImpl* pre_data_impl = static_cast<i::ScriptDataImpl*>(pre_data);
- // We assert that the pre-data is sane, even though we can actually
- // handle it if it turns out not to be in release mode.
- ASSERT(pre_data_impl == NULL || pre_data_impl->SanityCheck());
- // If the pre-data isn't sane we simply ignore it
- if (pre_data_impl != NULL && !pre_data_impl->SanityCheck()) {
- pre_data_impl = NULL;
- }
- i::Handle<i::SharedFunctionInfo> result =
+ i::SharedFunctionInfo* raw_result = NULL;
+ { i::HandleScope scope(isolate);
+ i::Handle<i::String> str = Utils::OpenHandle(*source);
+ i::Handle<i::Object> name_obj;
+ int line_offset = 0;
+ int column_offset = 0;
+ if (origin != NULL) {
+ if (!origin->ResourceName().IsEmpty()) {
+ name_obj = Utils::OpenHandle(*origin->ResourceName());
+ }
+ if (!origin->ResourceLineOffset().IsEmpty()) {
+ line_offset = static_cast<int>(origin->ResourceLineOffset()->Value());
+ }
+ if (!origin->ResourceColumnOffset().IsEmpty()) {
+ column_offset =
+ static_cast<int>(origin->ResourceColumnOffset()->Value());
+ }
+ }
+ EXCEPTION_PREAMBLE(isolate);
+ i::ScriptDataImpl* pre_data_impl =
+ static_cast<i::ScriptDataImpl*>(pre_data);
+ // We assert that the pre-data is sane, even though we can actually
+ // handle it if it turns out not to be in release mode.
+ ASSERT(pre_data_impl == NULL || pre_data_impl->SanityCheck());
+ // If the pre-data isn't sane we simply ignore it
+ if (pre_data_impl != NULL && !pre_data_impl->SanityCheck()) {
+ pre_data_impl = NULL;
+ }
+ i::Handle<i::SharedFunctionInfo> result =
i::Compiler::Compile(str,
name_obj,
line_offset,
@@ -1495,8 +1499,11 @@
pre_data_impl,
Utils::OpenHandle(*script_data),
i::NOT_NATIVES_CODE);
- has_pending_exception = result.is_null();
- EXCEPTION_BAILOUT_CHECK(isolate, Local<Script>());
+ has_pending_exception = result.is_null();
+ EXCEPTION_BAILOUT_CHECK(isolate, Local<Script>());
+ raw_result = *result;
+ }
+ i::Handle<i::SharedFunctionInfo> result(raw_result, isolate);
return Local<Script>(ToApi<Script>(result));
}
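
For readers who do not live in api.cc: the change above follows a common
internal pattern. Do the compilation inside a nested HandleScope so every
temporary handle the compiler creates is confined to it, pull out only the raw
result pointer before that scope is destroyed, and re-wrap the pointer in a
handle that belongs to the caller's scope. The sketch below is a
self-contained toy illustrating that pattern; HandleScope, SharedFunctionInfo,
Compile and ScriptNew here are simplified stand-ins, not the real V8 types or
functions.

// handle_escape_sketch.cc -- simplified stand-ins, not the real V8 classes.
#include <cassert>
#include <cstddef>
#include <vector>

struct SharedFunctionInfo { int id; };

// Toy HandleScope: every handle created while a scope is active lives in a
// global slot table; when the scope dies, the slots it opened are dropped.
class HandleScope {
 public:
  HandleScope() : mark_(slots().size()) {}
  ~HandleScope() { slots().resize(mark_); }

  static SharedFunctionInfo** CreateHandle(SharedFunctionInfo* raw) {
    slots().push_back(raw);
    return &slots().back();
  }

  static std::size_t LiveHandles() { return slots().size(); }

 private:
  static std::vector<SharedFunctionInfo*>& slots() {
    static std::vector<SharedFunctionInfo*> s;
    if (s.capacity() < 64) s.reserve(64);  // keep slot addresses stable here
    return s;
  }
  std::size_t mark_;
};

// Stands in for the compiler call: creates several temporary handles and
// returns a handle to the result.
SharedFunctionInfo** Compile() {
  static SharedFunctionInfo objects[4] = {{1}, {2}, {3}, {4}};
  for (int i = 0; i < 3; ++i) HandleScope::CreateHandle(&objects[i]);
  return HandleScope::CreateHandle(&objects[3]);
}

// The pattern from Script::New above: confine the compiler's handles to a
// nested scope, carry only the raw result pointer out, re-handle it outside.
SharedFunctionInfo** ScriptNew() {
  SharedFunctionInfo* raw_result = nullptr;
  {
    HandleScope inner_scope;
    raw_result = *Compile();
  }  // the compiler's temporary handles die with inner_scope
  return HandleScope::CreateHandle(raw_result);
}

int main() {
  HandleScope outer_scope;
  std::size_t before = HandleScope::LiveHandles();
  SharedFunctionInfo** script = ScriptNew();
  // Exactly one handle escaped into the caller's scope, and it still points
  // at the compilation result.
  assert(HandleScope::LiveHandles() == before + 1);
  assert((*script)->id == 4);
  return 0;
}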
=======================================
--- /branches/3.7/src/mark-compact.cc Mon Dec 5 09:05:37 2011
+++ /branches/3.7/src/mark-compact.cc Fri Dec 9 04:01:59 2011
@@ -619,8 +619,7 @@
}
void AddCandidate(JSFunction* function) {
- ASSERT(function->unchecked_code() ==
- function->unchecked_shared()->unchecked_code());
+ ASSERT(function->code() == function->shared()->code());
SetNextCandidate(function, jsfunction_candidates_head_);
jsfunction_candidates_head_ = function;
@@ -640,15 +639,15 @@
while (candidate != NULL) {
next_candidate = GetNextCandidate(candidate);
- SharedFunctionInfo* shared = candidate->unchecked_shared();
-
- Code* code = shared->unchecked_code();
+ SharedFunctionInfo* shared = candidate->shared();
+
+ Code* code = shared->code();
MarkBit code_mark = Marking::MarkBitFrom(code);
if (!code_mark.Get()) {
shared->set_code(lazy_compile);
candidate->set_code(lazy_compile);
} else {
- candidate->set_code(shared->unchecked_code());
+ candidate->set_code(shared->code());
}
// We are in the middle of a GC cycle so the write barrier in the code
@@ -674,7 +673,7 @@
next_candidate = GetNextCandidate(candidate);
SetNextCandidate(candidate, NULL);
- Code* code = candidate->unchecked_code();
+ Code* code = candidate->code();
MarkBit code_mark = Marking::MarkBitFrom(code);
if (!code_mark.Get()) {
candidate->set_code(lazy_compile);
@@ -702,7 +701,7 @@
static SharedFunctionInfo** GetNextCandidateField(
SharedFunctionInfo* candidate) {
- Code* code = candidate->unchecked_code();
+ Code* code = candidate->code();
return reinterpret_cast<SharedFunctionInfo**>(
code->address() + Code::kNextCodeFlushingCandidateOffset);
}
@@ -1037,12 +1036,12 @@
inline static bool IsCompiled(JSFunction* function) {
- return function->unchecked_code() !=
+ return function->code() !=
function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}
inline static bool IsCompiled(SharedFunctionInfo* function) {
- return function->unchecked_code() !=
+ return function->code() !=
function->GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
}
@@ -1051,8 +1050,7 @@
// Code is either on stack, in compilation cache or referenced
// by optimized version of function.
- MarkBit code_mark =
- Marking::MarkBitFrom(function->unchecked_code());
+ MarkBit code_mark = Marking::MarkBitFrom(function->code());
if (code_mark.Get()) {
if (!Marking::MarkBitFrom(shared_info).Get()) {
shared_info->set_code_age(0);
@@ -1061,7 +1059,7 @@
}
// We do not flush code for optimized functions.
- if (function->code() != shared_info->unchecked_code()) {
+ if (function->code() != shared_info->code()) {
return false;
}
@@ -1072,7 +1070,7 @@
// Code is either on stack, in compilation cache or referenced
// by optimized version of function.
MarkBit code_mark =
- Marking::MarkBitFrom(shared_info->unchecked_code());
+ Marking::MarkBitFrom(shared_info->code());
if (code_mark.Get()) {
return false;
}
@@ -1085,16 +1083,24 @@
// We never flush code for Api functions.
Object* function_data = shared_info->function_data();
- if (function_data->IsFunctionTemplateInfo()) return false;
+ if (function_data->IsFunctionTemplateInfo()) {
+ return false;
+ }
// Only flush code for functions.
- if (shared_info->code()->kind() != Code::FUNCTION) return false;
+ if (shared_info->code()->kind() != Code::FUNCTION) {
+ return false;
+ }
// Function must be lazy compilable.
- if (!shared_info->allows_lazy_compilation()) return false;
+ if (!shared_info->allows_lazy_compilation()) {
+ return false;
+ }
// If this is a full script wrapped in a function we do not flush the code.
- if (shared_info->is_toplevel()) return false;
+ if (shared_info->is_toplevel()) {
+ return false;
+ }
// Age this shared function info.
if (shared_info->code_age() < kCodeAgeThreshold) {
@@ -1267,30 +1273,12 @@
}
if (!flush_code_candidate) {
- Code* code = jsfunction->unchecked_shared()->unchecked_code();
+ Code* code = jsfunction->shared()->code();
MarkBit code_mark = Marking::MarkBitFrom(code);
- heap->mark_compact_collector()->MarkObject(code, code_mark);
-
- if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) {
- // For optimized functions we should retain both non-optimized version
- // of it's code and non-optimized version of all inlined functions.
- // This is required to support bailing out from inlined code.
- DeoptimizationInputData* data =
- reinterpret_cast<DeoptimizationInputData*>(
- jsfunction->unchecked_code()->unchecked_deoptimization_data());
-
- FixedArray* literals = data->UncheckedLiteralArray();
-
- for (int i = 0, count = data->InlinedFunctionCount()->value();
- i < count;
- i++) {
- JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
- Code* inlined_code = inlined->unchecked_shared()->unchecked_code();
- MarkBit inlined_code_mark =
- Marking::MarkBitFrom(inlined_code);
- heap->mark_compact_collector()->MarkObject(
- inlined_code, inlined_code_mark);
- }
+ collector->MarkObject(code, code_mark);
+
+ if (jsfunction->code()->kind() == Code::OPTIMIZED_FUNCTION) {
+ collector->MarkInlinedFunctionsCode(jsfunction->code());
}
}
@@ -1415,11 +1403,7 @@
: collector_(collector) {}
void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
- for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
- Code* code = it.frame()->unchecked_code();
- MarkBit code_bit = Marking::MarkBitFrom(code);
- collector_->MarkObject(it.frame()->unchecked_code(), code_bit);
- }
+ collector_->PrepareThreadForCodeFlushing(isolate, top);
}
private:
@@ -1441,8 +1425,8 @@
if (obj->IsSharedFunctionInfo()) {
SharedFunctionInfo* shared =
reinterpret_cast<SharedFunctionInfo*>(obj);
MarkBit shared_mark = Marking::MarkBitFrom(shared);
- MarkBit code_mark = Marking::MarkBitFrom(shared->unchecked_code());
- collector_->MarkObject(shared->unchecked_code(), code_mark);
+ MarkBit code_mark = Marking::MarkBitFrom(shared->code());
+ collector_->MarkObject(shared->code(), code_mark);
collector_->MarkObject(shared, shared_mark);
}
}
@@ -1452,6 +1436,44 @@
};
+void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) {
+ // For optimized functions we should retain both the non-optimized version
+ // of its code and the non-optimized version of all inlined functions.
+ // This is required to support bailing out from inlined code.
+ DeoptimizationInputData* data =
+ DeoptimizationInputData::cast(code->deoptimization_data());
+
+ FixedArray* literals = data->LiteralArray();
+
+ for (int i = 0, count = data->InlinedFunctionCount()->value();
+ i < count;
+ i++) {
+ JSFunction* inlined = JSFunction::cast(literals->get(i));
+ Code* inlined_code = inlined->shared()->code();
+ MarkBit inlined_code_mark = Marking::MarkBitFrom(inlined_code);
+ MarkObject(inlined_code, inlined_code_mark);
+ }
+}
+
+
+void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
+ ThreadLocalTop* top) {
+ for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
+ // Note: for the frame that has a pending lazy deoptimization,
+ // StackFrame::unchecked_code will return a non-optimized code object for
+ // the outermost function and StackFrame::LookupCode will return the
+ // actual optimized code object.
+ StackFrame* frame = it.frame();
+ Code* code = frame->unchecked_code();
+ MarkBit code_mark = Marking::MarkBitFrom(code);
+ MarkObject(code, code_mark);
+ if (frame->is_optimized()) {
+ MarkInlinedFunctionsCode(frame->LookupCode());
+ }
+ }
+}
+
+
void MarkCompactCollector::PrepareForCodeFlushing() {
ASSERT(heap() == Isolate::Current()->heap());
@@ -1479,11 +1501,8 @@
// Make sure we are not referencing the code from the stack.
ASSERT(this == heap()->mark_compact_collector());
- for (StackFrameIterator it; !it.done(); it.Advance()) {
- Code* code = it.frame()->unchecked_code();
- MarkBit code_mark = Marking::MarkBitFrom(code);
- MarkObject(code, code_mark);
- }
+ PrepareThreadForCodeFlushing(heap()->isolate(),
+ heap()->isolate()->thread_local_top());
// Iterate the archived stacks in all threads to check if
// the code is referenced.
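
Because the interaction is easy to lose in the diff above, here is a
deliberately tiny, self-contained model of the bug and the fix. Everything
below is a hypothetical stand-in (no JSFunction objects, write barriers, or
the unchecked_code()/LookupCode() distinction); it only shows why optimized
code that is reachable solely through the stack must also keep the
non-optimized code of its inlined functions marked, or the flusher will
replace it while a bailout may still need it.

// flush_inlined_sketch.cc -- hypothetical stand-ins, not the real V8 classes.
#include <cassert>
#include <vector>

struct SharedFunctionInfo;

struct Code {
  bool optimized;
  bool marked;
  std::vector<SharedFunctionInfo*> inlined;  // ~ deoptimization literal array
};

struct SharedFunctionInfo {
  Code* code;  // the non-optimized ("full") code
};

Code lazy_compile_stub = {false, true, {}};

// Analogue of the new helper: keep the non-optimized code of every function
// inlined into an optimized code object alive so a bailout can return to it.
void MarkInlinedFunctionsCode(Code* code) {
  for (SharedFunctionInfo* shared : code->inlined) shared->code->marked = true;
}

// Analogue of the new per-thread pass: mark code active on the stack, and for
// optimized frames also mark the inlined functions' non-optimized code.
void PrepareThreadForCodeFlushing(const std::vector<Code*>& stack) {
  for (Code* code : stack) {
    code->marked = true;
    if (code->optimized) MarkInlinedFunctionsCode(code);
  }
}

// The code flusher: candidates whose code stayed unmarked fall back to the
// lazy-compile stub and will be recompiled on the next call.
void ProcessFlushCandidates(
    const std::vector<SharedFunctionInfo*>& candidates) {
  for (SharedFunctionInfo* shared : candidates) {
    if (!shared->code->marked) shared->code = &lazy_compile_stub;
  }
}

int main() {
  Code inner_full = {false, false, {}};
  SharedFunctionInfo inner = {&inner_full};

  // outer() has been optimized with inner() inlined; because of the pending
  // lazy deoptimization the optimized code is reachable only via the stack.
  Code outer_optimized = {true, false, {&inner}};
  std::vector<Code*> stack = {&outer_optimized};

  PrepareThreadForCodeFlushing(stack);
  ProcessFlushCandidates({&inner});

  // With the fix the inlined function's non-optimized code is retained;
  // without the MarkInlinedFunctionsCode call it would have been flushed.
  assert(inner.code == &inner_full);
  return 0;
}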
=======================================
--- /branches/3.7/src/mark-compact.h Thu Oct 27 00:38:48 2011
+++ /branches/3.7/src/mark-compact.h Fri Dec 9 04:01:59 2011
@@ -383,6 +383,10 @@
};
+// Defined in isolate.h.
+class ThreadLocalTop;
+
+
// -------------------------------------------------------------------------
// Mark-Compact collector
class MarkCompactCollector {
@@ -603,6 +607,14 @@
friend class CodeMarkingVisitor;
friend class SharedFunctionInfoMarkingVisitor;
+ // Mark non-optimized code for functions inlined into the given optimized
+ // code. This will prevent it from being flushed.
+ void MarkInlinedFunctionsCode(Code* code);
+
+ // Mark code objects that are active on the stack to prevent them
+ // from being flushed.
+ void PrepareThreadForCodeFlushing(Isolate* isolate, ThreadLocalTop* top);
+
void PrepareForCodeFlushing();
// Marking operations for objects reachable from roots.
=======================================
--- /branches/3.7/src/objects.h Tue Nov 29 06:28:56 2011
+++ /branches/3.7/src/objects.h Fri Dec 9 04:01:59 2011
@@ -3737,11 +3737,6 @@
DEFINE_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
DEFINE_ELEMENT_ACCESSORS(OsrAstId, Smi)
DEFINE_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
-
- // Unchecked accessor to be used during GC.
- FixedArray* UncheckedLiteralArray() {
- return reinterpret_cast<FixedArray*>(get(kLiteralArrayIndex));
- }
#undef DEFINE_ELEMENT_ACCESSORS
=======================================
--- /branches/3.7/src/v8threads.h Tue Sep 13 01:21:47 2011
+++ /branches/3.7/src/v8threads.h Fri Dec 9 04:01:59 2011
@@ -72,7 +72,7 @@
};
-// Defined in top.h
+// Defined in isolate.h.
class ThreadLocalTop;
=======================================
--- /branches/3.7/src/version.cc Wed Dec 7 08:44:40 2011
+++ /branches/3.7/src/version.cc Fri Dec 9 04:01:59 2011
@@ -35,7 +35,7 @@
#define MAJOR_VERSION 3
#define MINOR_VERSION 7
#define BUILD_NUMBER 12
-#define PATCH_LEVEL 8
+#define PATCH_LEVEL 9
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0