Revision: 24871
Author: [email protected]
Date: Fri Oct 24 12:40:05 2014 UTC
Log: Tweaks to the code serializer.
- consider the source string as a special sort of back reference.
- use repeat op code for more root members.
[email protected]
Review URL: https://codereview.chromium.org/674883002
https://code.google.com/p/v8/source/detail?r=24871
Modified:
/branches/bleeding_edge/src/heap/heap.cc
/branches/bleeding_edge/src/heap/heap.h
/branches/bleeding_edge/src/hydrogen-instructions.cc
/branches/bleeding_edge/src/serialize.cc
/branches/bleeding_edge/src/serialize.h
/branches/bleeding_edge/test/cctest/test-serialize.cc
=======================================
--- /branches/bleeding_edge/src/heap/heap.cc Thu Oct 23 15:16:27 2014 UTC
+++ /branches/bleeding_edge/src/heap/heap.cc Fri Oct 24 12:40:05 2014 UTC
@@ -4549,6 +4549,19 @@
UNREACHABLE();
return false;
}
+
+
+bool Heap::RootIsImmortalImmovable(int root_index) {
+ switch (root_index) {
+#define CASE(name) \
+ case Heap::k##name##RootIndex: \
+ return true;
+ IMMORTAL_IMMOVABLE_ROOT_LIST(CASE);
+#undef CASE
+ default:
+ return false;
+ }
+}
#ifdef VERIFY_HEAP
=======================================
--- /branches/bleeding_edge/src/heap/heap.h Thu Oct 23 15:16:27 2014 UTC
+++ /branches/bleeding_edge/src/heap/heap.h Fri Oct 24 12:40:05 2014 UTC
@@ -198,58 +198,6 @@
SMI_ROOT_LIST(V) \
V(StringTable, string_table, StringTable)
-// Heap roots that are known to be immortal immovable, for which we can safely
-// skip write barriers.
-#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
- V(byte_array_map) \
- V(free_space_map) \
- V(one_pointer_filler_map) \
- V(two_pointer_filler_map) \
- V(undefined_value) \
- V(the_hole_value) \
- V(null_value) \
- V(true_value) \
- V(false_value) \
- V(uninitialized_value) \
- V(cell_map) \
- V(global_property_cell_map) \
- V(shared_function_info_map) \
- V(meta_map) \
- V(heap_number_map) \
- V(mutable_heap_number_map) \
- V(native_context_map) \
- V(fixed_array_map) \
- V(code_map) \
- V(scope_info_map) \
- V(fixed_cow_array_map) \
- V(fixed_double_array_map) \
- V(constant_pool_array_map) \
- V(weak_cell_map) \
- V(no_interceptor_result_sentinel) \
- V(hash_table_map) \
- V(ordered_hash_table_map) \
- V(empty_fixed_array) \
- V(empty_byte_array) \
- V(empty_descriptor_array) \
- V(empty_constant_pool_array) \
- V(arguments_marker) \
- V(symbol_map) \
- V(sloppy_arguments_elements_map) \
- V(function_context_map) \
- V(catch_context_map) \
- V(with_context_map) \
- V(block_context_map) \
- V(module_context_map) \
- V(global_context_map) \
- V(undefined_map) \
- V(the_hole_map) \
- V(null_map) \
- V(boolean_map) \
- V(uninitialized_map) \
- V(message_object_map) \
- V(foreign_map) \
- V(neander_map)
-
#define INTERNALIZED_STRING_LIST(V) \
V(Object_string, "Object") \
V(proto_string, "__proto__") \
@@ -351,6 +299,60 @@
V(class_start_position_symbol) \
V(class_end_position_symbol)
+// Heap roots that are known to be immortal immovable, for which we can safely
+// skip write barriers. This list is not complete and has omissions.
+#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
+ V(ByteArrayMap) \
+ V(FreeSpaceMap) \
+ V(OnePointerFillerMap) \
+ V(TwoPointerFillerMap) \
+ V(UndefinedValue) \
+ V(TheHoleValue) \
+ V(NullValue) \
+ V(TrueValue) \
+ V(FalseValue) \
+ V(UninitializedValue) \
+ V(CellMap) \
+ V(GlobalPropertyCellMap) \
+ V(SharedFunctionInfoMap) \
+ V(MetaMap) \
+ V(HeapNumberMap) \
+ V(MutableHeapNumberMap) \
+ V(NativeContextMap) \
+ V(FixedArrayMap) \
+ V(CodeMap) \
+ V(ScopeInfoMap) \
+ V(FixedCOWArrayMap) \
+ V(FixedDoubleArrayMap) \
+ V(ConstantPoolArrayMap) \
+ V(WeakCellMap) \
+ V(NoInterceptorResultSentinel) \
+ V(HashTableMap) \
+ V(OrderedHashTableMap) \
+ V(EmptyFixedArray) \
+ V(EmptyByteArray) \
+ V(EmptyDescriptorArray) \
+ V(EmptyConstantPoolArray) \
+ V(ArgumentsMarker) \
+ V(SymbolMap) \
+ V(SloppyArgumentsElementsMap) \
+ V(FunctionContextMap) \
+ V(CatchContextMap) \
+ V(WithContextMap) \
+ V(BlockContextMap) \
+ V(ModuleContextMap) \
+ V(GlobalContextMap) \
+ V(UndefinedMap) \
+ V(TheHoleMap) \
+ V(NullMap) \
+ V(BooleanMap) \
+ V(UninitializedMap) \
+ V(ArgumentsMarkerMap) \
+ V(JSMessageObjectMap) \
+ V(ForeignMap) \
+ V(NeanderMap) \
+ PRIVATE_SYMBOL_LIST(V)
+
// Forward declarations.
class HeapStats;
class Isolate;
@@ -927,6 +929,8 @@
Address* store_buffer_top_address() {
return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
}
+
+ static bool RootIsImmortalImmovable(int root_index);
#ifdef VERIFY_HEAP
// Verify the heap is in its normal state before or after a GC.
@@ -1115,6 +1119,8 @@
kStrongRootListLength = kStringTableRootIndex,
kSmiRootsStart = kStringTableRootIndex + 1
};
+
+ Object* root(RootListIndex index) { return roots_[index]; }
STATIC_ASSERT(kUndefinedValueRootIndex ==
Internals::kUndefinedValueRootIndex);
=======================================
--- /branches/bleeding_edge/src/hydrogen-instructions.cc Thu Oct 2 10:52:12 2014 UTC
+++ /branches/bleeding_edge/src/hydrogen-instructions.cc Fri Oct 24 12:40:05 2014 UTC
@@ -2862,7 +2862,7 @@
DCHECK(!object_.IsKnownGlobal(heap->nan_value()));
return
#define IMMORTAL_IMMOVABLE_ROOT(name) \
- object_.IsKnownGlobal(heap->name()) ||
+ object_.IsKnownGlobal(heap->root(Heap::k##name##RootIndex)) ||
IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
#undef IMMORTAL_IMMOVABLE_ROOT
#define INTERNALIZED_STRING(name, value) \
@@ -2873,9 +2873,6 @@
object_.IsKnownGlobal(heap->name##_map()) ||
STRING_TYPE_LIST(STRING_TYPE)
#undef STRING_TYPE
-#define SYMBOL(name) object_.IsKnownGlobal(heap->name()) ||
- PRIVATE_SYMBOL_LIST(SYMBOL)
-#undef SYMBOL
false;
}
=======================================
--- /branches/bleeding_edge/src/serialize.cc Fri Oct 24 08:37:03 2014 UTC
+++ /branches/bleeding_edge/src/serialize.cc Fri Oct 24 12:40:05 2014 UTC
@@ -1677,11 +1677,10 @@
while (current < end && !(*current)->IsSmi()) {
HeapObject* current_contents = HeapObject::cast(*current);
int root_index =
serializer_->root_index_map()->Lookup(current_contents);
- // Repeats are not subject to the write barrier so there are only some
- // objects that can be used in a repeat encoding. These are the early
- // ones in the root array that are never in new space.
+ // Repeats are not subject to the write barrier so we can only use
+ // immortal immovable root members. They are never in new space.
if (current != start && root_index != RootIndexMap::kInvalidRootIndex &&
- root_index < kRootArrayNumberOfConstantEncodings &&
+ Heap::RootIsImmortalImmovable(root_index) &&
current_contents == current[-1]) {
DCHECK(!serializer_->isolate()->heap()->InNewSpace(current_contents));
int repeat_count = 1;
@@ -1908,7 +1907,7 @@
// Large objects are allocated one-by-one when deserializing. We do not
// have to keep track of multiple chunks.
pending_chunk_[LO_SPACE] += size;
- return BackReference(LO_SPACE, 0, seen_large_objects_index_++);
+ return BackReference::LargeObjectReference(seen_large_objects_index_++);
}
@@ -1925,7 +1924,8 @@
}
uint32_t offset = pending_chunk_[space];
pending_chunk_[space] = new_chunk_size;
- return BackReference(space, completed_chunks_[space].length(), offset);
+ return BackReference::Reference(space, completed_chunks_[space].length(),
+ offset);
}
@@ -2007,12 +2007,17 @@
BackReference back_reference = back_reference_map_.Lookup(obj);
if (back_reference.is_valid()) {
- if (FLAG_trace_code_serializer) {
- PrintF(" Encoding back reference to: ");
- obj->ShortPrint();
- PrintF("\n");
+ if (back_reference.is_source()) {
+ DCHECK_EQ(source_, obj);
+ SerializeSourceObject(how_to_code, where_to_point);
+ } else {
+ if (FLAG_trace_code_serializer) {
+ PrintF(" Encoding back reference to: ");
+ obj->ShortPrint();
+ PrintF("\n");
+ }
+ SerializeBackReference(back_reference, how_to_code, where_to_point, skip);
}
- SerializeBackReference(back_reference, how_to_code, where_to_point, skip);
return;
}
@@ -2055,11 +2060,6 @@
}
UNREACHABLE();
}
-
- if (obj == source_) {
- SerializeSourceObject(how_to_code, where_to_point);
- return;
- }
// Past this point we should not see any (context-specific) maps anymore.
CHECK(!obj->IsMap());
=======================================
--- /branches/bleeding_edge/src/serialize.h Fri Oct 24 08:37:03 2014 UTC
+++ /branches/bleeding_edge/src/serialize.h Fri Oct 24 12:40:05 2014 UTC
@@ -189,16 +189,26 @@
public:
explicit BackReference(uint32_t bitfield) : bitfield_(bitfield) {}
- BackReference(AllocationSpace space, uint32_t chunk_index,
- uint32_t chunk_offset) {
+ BackReference() : bitfield_(kInvalidValue) {}
+
+ static BackReference SourceReference() { return BackReference(kSourceValue); }
+
+ static BackReference LargeObjectReference(uint32_t index) {
+ return BackReference(SpaceBits::encode(LO_SPACE) |
+ ChunkOffsetBits::encode(index));
+ }
+
+ static BackReference Reference(AllocationSpace space, uint32_t chunk_index,
+ uint32_t chunk_offset) {
DCHECK(IsAligned(chunk_offset, kObjectAlignment));
- bitfield_ = SpaceBits::encode(space) | ChunkIndexBits::encode(chunk_index) |
- ChunkOffsetBits::encode(chunk_offset >> kObjectAlignmentBits);
+ DCHECK_NE(LO_SPACE, space);
+ return BackReference(
+ SpaceBits::encode(space) | ChunkIndexBits::encode(chunk_index) |
+ ChunkOffsetBits::encode(chunk_offset >> kObjectAlignmentBits));
}
-
- BackReference() : bitfield_(kInvalidValue) {}
bool is_valid() const { return bitfield_ != kInvalidValue; }
+ bool is_source() const { return bitfield_ == kSourceValue; }
AllocationSpace space() const {
DCHECK(is_valid());
@@ -224,6 +234,7 @@
private:
static const uint32_t kInvalidValue = 0xFFFFFFFF;
+ static const uint32_t kSourceValue = 0xFFFFFFFE;
static const int kChunkOffsetSize = kPageSizeBits - kObjectAlignmentBits;
static const int kChunkIndexSize = 32 - kChunkOffsetSize - kSpaceTagSize;
@@ -262,6 +273,10 @@
HashMap::Entry* entry = LookupEntry(map_, obj, true);
SetValue(entry, b.bitfield());
}
+
+ void AddSourceString(String* string) {
+ Add(string, BackReference::SourceReference());
+ }
private:
DisallowHeapAllocation no_allocation_;
@@ -700,7 +715,9 @@
: Serializer(isolate, sink),
source_(source),
main_code_(main_code),
- num_internalized_strings_(0) {}
+ num_internalized_strings_(0) {
+ back_reference_map_.AddSourceString(source);
+ }
virtual void SerializeObject(HeapObject* o, HowToCode how_to_code,
WhereToPoint where_to_point, int skip);
=======================================
--- /branches/bleeding_edge/test/cctest/test-serialize.cc Fri Oct 24 08:37:03 2014 UTC
+++ /branches/bleeding_edge/test/cctest/test-serialize.cc Fri Oct 24 12:40:05 2014 UTC
@@ -867,6 +867,8 @@
CHECK_EQ(6 * 1000000, Handle<String>::cast(copy_result)->length());
CHECK(isolate->heap()->InSpace(HeapObject::cast(*copy_result), LO_SPACE));
+ // Make sure we do not serialize too much, e.g. include the source string.
+ CHECK_LT(cache->length(), 7000000);
delete cache;
source.Dispose();
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/d/optout.