Revision: 10458
Author: [email protected]
Date: Fri Jan 20 05:43:21 2012
Log: Fix keyed lookup cache to have 2 entries per bucket instead
of one in order to reduce collisions.
Review URL: https://chromiumcodereview.appspot.com/9269004
http://code.google.com/p/v8/source/detail?r=10458
Modified:
/branches/bleeding_edge/src/arm/ic-arm.cc
/branches/bleeding_edge/src/heap.cc
/branches/bleeding_edge/src/heap.h
/branches/bleeding_edge/src/ia32/ic-ia32.cc
/branches/bleeding_edge/src/mips/ic-mips.cc
/branches/bleeding_edge/src/x64/ic-x64.cc
=======================================
--- /branches/bleeding_edge/src/arm/ic-arm.cc Wed Dec 14 04:46:32 2011
+++ /branches/bleeding_edge/src/arm/ic-arm.cc Fri Jan 20 05:43:21 2012
@@ -1031,14 +1031,25 @@
__ mov(r3, Operand(r2, ASR, KeyedLookupCache::kMapHashShift));
__ ldr(r4, FieldMemOperand(r0, String::kHashFieldOffset));
__ eor(r3, r3, Operand(r4, ASR, String::kHashShift));
- __ And(r3, r3, Operand(KeyedLookupCache::kCapacityMask));
+ int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
+ __ And(r3, r3, Operand(mask));
// Load the key (consisting of map and symbol) from the cache and
// check for match.
+ Label try_second_entry, hit_on_first_entry, load_in_object_property;
ExternalReference cache_keys =
ExternalReference::keyed_lookup_cache_keys(isolate);
__ mov(r4, Operand(cache_keys));
__ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));
+ // Move r4 to second entry.
+ __ ldr(r5, MemOperand(r4, kPointerSize * 2, PostIndex));
+ __ cmp(r2, r5);
+ __ b(ne, &try_second_entry);
+ __ ldr(r5, MemOperand(r4, -kPointerSize)); // Load symbol
+ __ cmp(r0, r5);
+ __ b(eq, &hit_on_first_entry);
+
+ __ bind(&try_second_entry);
__ ldr(r5, MemOperand(r4, kPointerSize, PostIndex)); // Move r4 to symbol.
__ cmp(r2, r5);
__ b(ne, &slow);
@@ -1053,13 +1064,26 @@
// r3 : lookup cache index
ExternalReference cache_field_offsets =
ExternalReference::keyed_lookup_cache_field_offsets(isolate);
+
+ // Hit on second entry.
__ mov(r4, Operand(cache_field_offsets));
+ __ add(r3, r3, Operand(1));
__ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
__ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
__ sub(r5, r5, r6, SetCC);
__ b(ge, &property_array_property);
+ __ jmp(&load_in_object_property);
+
+ // Hit on first entry.
+ __ bind(&hit_on_first_entry);
+ __ mov(r4, Operand(cache_field_offsets));
+ __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
+ __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
+ __ sub(r5, r5, r6, SetCC);
+ __ b(ge, &property_array_property);
// Load in-object property.
+ __ bind(&load_in_object_property);
__ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset));
__ add(r6, r6, r5); // Index from start of object.
__ sub(r1, r1, Operand(kHeapObjectTag)); // Remove the heap tag.
=======================================
--- /branches/bleeding_edge/src/heap.cc Wed Jan 18 08:16:11 2012
+++ /branches/bleeding_edge/src/heap.cc Fri Jan 20 05:43:21 2012
@@ -6515,11 +6515,17 @@
int KeyedLookupCache::Lookup(Map* map, String* name) {
- int index = Hash(map, name);
+ int index = (Hash(map, name) & kHashMask);
Key& key = keys_[index];
if ((key.map == map) && key.name->Equals(name)) {
return field_offsets_[index];
}
+ ASSERT(kEntriesPerBucket == 2); // There are two entries to check.
+ // First entry in the bucket missed, check the second.
+ Key& key2 = keys_[index + 1];
+ if ((key2.map == map) && key2.name->Equals(name)) {
+ return field_offsets_[index + 1];
+ }
return kNotFound;
}
@@ -6527,8 +6533,14 @@
void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
String* symbol;
if (HEAP->LookupSymbolIfExists(name, &symbol)) {
- int index = Hash(map, symbol);
+ int index = (Hash(map, symbol) & kHashMask);
Key& key = keys_[index];
+ Key& key2 = keys_[index + 1]; // Second entry in the bucket.
+ // Demote the first entry to the second in the bucket.
+ key2.map = key.map;
+ key2.name = key.name;
+ field_offsets_[index + 1] = field_offsets_[index];
+ // Write the new first entry.
key.map = map;
key.name = symbol;
field_offsets_[index] = field_offset;
=======================================
--- /branches/bleeding_edge/src/heap.h Wed Jan 18 08:16:11 2012
+++ /branches/bleeding_edge/src/heap.h Fri Jan 20 05:43:21 2012
@@ -2135,9 +2135,11 @@
// Clear the cache.
void Clear();
- static const int kLength = 64;
+ static const int kLength = 128;
static const int kCapacityMask = kLength - 1;
- static const int kMapHashShift = 2;
+ static const int kMapHashShift = 5;
+ static const int kHashMask = -2; // Zero the last bit.
+ static const int kEntriesPerBucket = 2;
static const int kNotFound = -1;
private:
=======================================
--- /branches/bleeding_edge/src/ia32/ic-ia32.cc Wed Dec 14 04:46:32 2011
+++ /branches/bleeding_edge/src/ia32/ic-ia32.cc Fri Jan 20 05:43:21 2012
@@ -473,7 +473,6 @@
Counters* counters = isolate->counters();
__ IncrementCounter(counters->keyed_load_generic_smi(), 1);
__ ret(0);
-
__ bind(&check_number_dictionary);
__ mov(ebx, eax);
__ SmiUntag(ebx);
@@ -535,13 +534,23 @@
__ mov(edi, FieldOperand(eax, String::kHashFieldOffset));
__ shr(edi, String::kHashShift);
__ xor_(ecx, edi);
- __ and_(ecx, KeyedLookupCache::kCapacityMask);
+ __ and_(ecx, KeyedLookupCache::kCapacityMask &
+               KeyedLookupCache::kHashMask);
// Load the key (consisting of map and symbol) from the cache and
// check for match.
+ Label try_second_entry, hit_on_first_entry, load_in_object_property;
ExternalReference cache_keys =
ExternalReference::keyed_lookup_cache_keys(masm->isolate());
__ mov(edi, ecx);
+ __ shl(edi, kPointerSizeLog2 + 1);
+ __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
+ __ j(not_equal, &try_second_entry);
+ __ add(edi, Immediate(kPointerSize));
+ __ cmp(eax, Operand::StaticArray(edi, times_1, cache_keys));
+ __ j(equal, &hit_on_first_entry);
+
+ __ bind(&try_second_entry);
+ __ lea(edi, Operand(ecx, 1));
__ shl(edi, kPointerSizeLog2 + 1);
__ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
__ j(not_equal, &slow);
@@ -556,13 +565,26 @@
// ecx : lookup cache index
ExternalReference cache_field_offsets =
ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
+
+ // Hit on second entry.
+ __ add(ecx, Immediate(1));
__ mov(edi,
Operand::StaticArray(ecx, times_pointer_size,
cache_field_offsets));
__ movzx_b(ecx, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
__ sub(edi, ecx);
__ j(above_equal, &property_array_property);
+ __ jmp(&load_in_object_property);
+
+ // Hit on first entry.
+ __ bind(&hit_on_first_entry);
+ __ mov(edi,
+ Operand::StaticArray(ecx, times_pointer_size,
+ cache_field_offsets));
+ __ movzx_b(ecx, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
+ __ sub(edi, ecx);
+ __ j(above_equal, &property_array_property);
// Load in-object property.
+ __ bind(&load_in_object_property);
__ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
__ add(ecx, edi);
__ mov(eax, FieldOperand(edx, ecx, times_pointer_size, 0));
=======================================
--- /branches/bleeding_edge/src/mips/ic-mips.cc Tue Dec 27 00:41:30 2011
+++ /branches/bleeding_edge/src/mips/ic-mips.cc Fri Jan 20 05:43:21 2012
@@ -1033,19 +1033,26 @@
__ lw(t0, FieldMemOperand(a0, String::kHashFieldOffset));
__ sra(at, t0, String::kHashShift);
__ xor_(a3, a3, at);
- __ And(a3, a3, Operand(KeyedLookupCache::kCapacityMask));
+ int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
+ __ And(a3, a3, Operand(mask));
// Load the key (consisting of map and symbol) from the cache and
// check for match.
+ Label try_second_entry, hit_on_first_entry, load_in_object_property;
ExternalReference cache_keys =
ExternalReference::keyed_lookup_cache_keys(isolate);
__ li(t0, Operand(cache_keys));
__ sll(at, a3, kPointerSizeLog2 + 1);
__ addu(t0, t0, at);
- __ lw(t1, MemOperand(t0)); // Move t0 to symbol.
- __ Addu(t0, t0, Operand(kPointerSize));
- __ Branch(&slow, ne, a2, Operand(t1));
__ lw(t1, MemOperand(t0));
+ __ Branch(&try_second_entry, ne, a2, Operand(t1));
+ __ lw(t1, MemOperand(t0, kPointerSize));
+ __ Branch(&hit_on_first_entry, eq, a0, Operand(t1));
+
+ __ bind(&try_second_entry);
+ __ lw(t1, MemOperand(t0, kPointerSize * 2));
+ __ Branch(&slow, ne, a2, Operand(t1));
+ __ lw(t1, MemOperand(t0, kPointerSize * 3));
__ Branch(&slow, ne, a0, Operand(t1));
// Get field offset.
@@ -1055,15 +1062,29 @@
// a3 : lookup cache index
ExternalReference cache_field_offsets =
ExternalReference::keyed_lookup_cache_field_offsets(isolate);
+
+ // Hit on second entry.
__ li(t0, Operand(cache_field_offsets));
__ sll(at, a3, kPointerSizeLog2);
__ addu(at, t0, at);
+ __ lw(t1, MemOperand(at, kPointerSize));
+ __ lbu(t2, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
+ __ Subu(t1, t1, t2);
+ __ Branch(&property_array_property, ge, t1, Operand(zero_reg));
+ __ Branch(&load_in_object_property);
+
+ // Hit on first entry.
+ __ bind(&hit_on_first_entry);
+ __ li(t0, Operand(cache_field_offsets));
+ __ sll(at, a3, kPointerSizeLog2);
+ __ addu(at, t0, at);
__ lw(t1, MemOperand(at));
__ lbu(t2, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
__ Subu(t1, t1, t2);
__ Branch(&property_array_property, ge, t1, Operand(zero_reg));
// Load in-object property.
+ __ bind(&load_in_object_property);
__ lbu(t2, FieldMemOperand(a2, Map::kInstanceSizeOffset));
__ addu(t2, t2, t1); // Index from start of object.
__ Subu(a1, a1, Operand(kHeapObjectTag)); // Remove the heap tag.
=======================================
--- /branches/bleeding_edge/src/x64/ic-x64.cc Wed Dec 14 04:46:32 2011
+++ /branches/bleeding_edge/src/x64/ic-x64.cc Fri Jan 20 05:43:21 2012
@@ -462,30 +462,51 @@
__ movl(rdi, FieldOperand(rax, String::kHashFieldOffset));
__ shr(rdi, Immediate(String::kHashShift));
__ xor_(rcx, rdi);
- __ and_(rcx, Immediate(KeyedLookupCache::kCapacityMask));
+ int mask = (KeyedLookupCache::kCapacityMask &
+             KeyedLookupCache::kHashMask);
+ __ and_(rcx, Immediate(mask));
// Load the key (consisting of map and symbol) from the cache and
// check for match.
+ Label try_second_entry, hit_on_first_entry, load_in_object_property;
ExternalReference cache_keys
= ExternalReference::keyed_lookup_cache_keys(masm->isolate());
__ movq(rdi, rcx);
__ shl(rdi, Immediate(kPointerSizeLog2 + 1));
__ LoadAddress(kScratchRegister, cache_keys);
__ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, 0));
- __ j(not_equal, &slow);
+ __ j(not_equal, &try_second_entry);
__ cmpq(rax, Operand(kScratchRegister, rdi, times_1, kPointerSize));
+ __ j(equal, &hit_on_first_entry);
+
+ __ bind(&try_second_entry);
+ __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, kPointerSize * 2));
__ j(not_equal, &slow);
+ __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, kPointerSize * 3));
+ __ j(not_equal, &slow);
// Get field offset, which is a 32-bit integer.
ExternalReference cache_field_offsets =
ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
+
+ // Hit on second entry.
__ LoadAddress(kScratchRegister, cache_field_offsets);
+ __ addl(rcx, Immediate(1));
__ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
__ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
__ subq(rdi, rcx);
__ j(above_equal, &property_array_property);
+ __ jmp(&load_in_object_property);
+
+ // Hit on first entry.
+ __ bind(&hit_on_first_entry);
+ __ LoadAddress(kScratchRegister, cache_field_offsets);
+ __ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
+ __ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
+ __ subq(rdi, rcx);
+ __ j(above_equal, &property_array_property);
// Load in-object property.
+ __ bind(&load_in_object_property);
__ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
__ addq(rcx, rdi);
__ movq(rax, FieldOperand(rdx, rcx, times_pointer_size, 0));
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev