Author: [email protected]
Date: Thu Jul 16 05:56:50 2009
New Revision: 2486

Modified:
    branches/bleeding_edge/src/arm/ic-arm.cc
    branches/bleeding_edge/src/arm/simulator-arm.cc
    branches/bleeding_edge/src/ia32/assembler-ia32.cc
    branches/bleeding_edge/src/ia32/assembler-ia32.h
    branches/bleeding_edge/src/ia32/ic-ia32.cc
    branches/bleeding_edge/src/objects.cc
    branches/bleeding_edge/src/objects.h
    branches/bleeding_edge/src/utils.h

Log:
Changed hash table to use more of the hash value when probing.

Review URL: http://codereview.chromium.org/155350

Modified: branches/bleeding_edge/src/arm/ic-arm.cc
==============================================================================
--- branches/bleeding_edge/src/arm/ic-arm.cc    (original)
+++ branches/bleeding_edge/src/arm/ic-arm.cc    Thu Jul 16 05:56:50 2009
@@ -103,16 +103,20 @@
    // Generate an unrolled loop that performs a few probes before
    // giving up. Measurements done on Gmail indicate that 2 probes
    // cover ~93% of loads from dictionaries.
-  static const int kProbes = 4;
-  for (int i = 0; i < kProbes; i++) {
-    // Compute the masked index: (hash + i + i * i) & mask.
+  static const uint32_t kProbes =
+      HashTable<StringDictionaryShape, String*>::kNofFastProbes;
+  static const uint32_t kShift =
+      HashTable<StringDictionaryShape, String*>::kHashRotateShift;
+
+  for (uint32_t i = 0; i < kProbes; i++) {
+    // Compute the masked index.
      __ ldr(t1, FieldMemOperand(r2, String::kLengthOffset));
      __ mov(t1, Operand(t1, LSR, String::kHashShift));
      if (i > 0) {
-      __ add(t1, t1, Operand(StringDictionary::GetProbeOffset(i)));
+      __ and_(t1, r3, Operand(t1, ROR, (kShift * i) % kBitsPerInt));
+    } else {
+      __ and_(t1, t1, Operand(r3));
      }
-    __ and_(t1, t1, Operand(r3));
-
      // Scale the index by multiplying by the element size.
      ASSERT(StringDictionary::kEntrySize == 3);
      __ add(t1, t1, Operand(t1, LSL, 1));  // t1 = t1 * 3

Modified: branches/bleeding_edge/src/arm/simulator-arm.cc
==============================================================================
--- branches/bleeding_edge/src/arm/simulator-arm.cc     (original)
+++ branches/bleeding_edge/src/arm/simulator-arm.cc     Thu Jul 16 05:56:50 2009
@@ -45,6 +45,7 @@
  using ::v8::internal::OS;
  using ::v8::internal::ReadLine;
  using ::v8::internal::DeleteArray;
+using ::v8::internal::RotateRight;

  // This macro provides a platform independent use of sscanf. The reason for
  SScanF not being implemented in a platform independent way through
@@ -817,7 +818,12 @@
        }

        case ROR: {
-        UNIMPLEMENTED();
+        ASSERT(shift_amount > 0);
+        uint32_t uresult = static_cast<uint32_t>(result);
+        uresult = RotateRight(uresult, shift_amount - 1);
+        *carry_out = (uresult & 1) == 1;
+        uresult = RotateRight(uresult, 1);
+        result = static_cast<int32_t>(uresult);
          break;
        }


Modified: branches/bleeding_edge/src/ia32/assembler-ia32.cc
==============================================================================
--- branches/bleeding_edge/src/ia32/assembler-ia32.cc   (original)
+++ branches/bleeding_edge/src/ia32/assembler-ia32.cc   Thu Jul 16 05:56:50  
2009
@@ -1240,6 +1240,16 @@
  }


+void Assembler::ror(Register dst, uint32_t count) {
+  EnsureSpace ensure_space(this);
+  last_pc_ = pc_;
+  EMIT(0xC1);
+  emit_operand(ecx, Operand(dst));
+  ASSERT(count < 32);
+  EMIT(count);
+}
+
+
  void Assembler::bt(const Operand& dst, Register src) {
    EnsureSpace ensure_space(this);
    last_pc_ = pc_;

Modified: branches/bleeding_edge/src/ia32/assembler-ia32.h
==============================================================================
--- branches/bleeding_edge/src/ia32/assembler-ia32.h    (original)
+++ branches/bleeding_edge/src/ia32/assembler-ia32.h    Thu Jul 16 05:56:50  
2009
@@ -597,6 +597,9 @@
    void xor_(const Operand& src, Register dst);
    void xor_(const Operand& dst, const Immediate& x);

+  // Rotate dst right by count bits; asserts count < 32.
+  void ror(Register dst, uint32_t count);
+
    // Bit operations.
    void bt(const Operand& dst, Register src);
    void bts(const Operand& dst, Register src);

Modified: branches/bleeding_edge/src/ia32/ic-ia32.cc
==============================================================================
--- branches/bleeding_edge/src/ia32/ic-ia32.cc  (original)
+++ branches/bleeding_edge/src/ia32/ic-ia32.cc  Thu Jul 16 05:56:50 2009
@@ -97,15 +97,20 @@
    // Generate an unrolled loop that performs a few probes before
    // giving up. Measurements done on Gmail indicate that 2 probes
    // cover ~93% of loads from dictionaries.
-  static const int kProbes = 4;
    const int kElementsStartOffset =
        Array::kHeaderSize + StringDictionary::kElementsStartIndex *  
kPointerSize;
-  for (int i = 0; i < kProbes; i++) {
-    // Compute the masked index: (hash + i + i * i) & mask.
+
+  static const uint32_t kProbes =
+      HashTable<StringDictionaryShape, String*>::kNofFastProbes;
+  static const uint32_t kShift =
+      HashTable<StringDictionaryShape, String*>::kHashRotateShift;
+
+  for (uint32_t i = 0; i < kProbes; i++) {
+    // Compute the masked index.
      __ mov(r1, FieldOperand(name, String::kLengthOffset));
      __ shr(r1, String::kHashShift);
      if (i > 0) {
-      __ add(Operand(r1), Immediate(StringDictionary::GetProbeOffset(i)));
+      __ ror(r1, (kShift * i) % kBitsPerInt);
      }
      __ and_(r1, Operand(r2));


Modified: branches/bleeding_edge/src/objects.cc
==============================================================================
--- branches/bleeding_edge/src/objects.cc       (original)
+++ branches/bleeding_edge/src/objects.cc       Thu Jul 16 05:56:50 2009
@@ -6502,7 +6502,8 @@
  Object* HashTable<Shape, Key>::Allocate(
      int at_least_space_for) {
    int capacity = RoundUpToPowerOf2(at_least_space_for);
-  if (capacity < 4) capacity = 4;  // Guarantee min capacity.
+  static const int kMinCapacity = 16;
+  if (capacity < kMinCapacity) capacity = kMinCapacity;
    Object* obj = Heap::AllocateHashTable(EntryToIndex(capacity));
    if (!obj->IsFailure()) {
      HashTable::cast(obj)->SetNumberOfElements(0);
@@ -6516,26 +6517,25 @@
  // Find entry for key otherwise return -1.
  template<typename Shape, typename Key>
  int HashTable<Shape, Key>::FindEntry(Key key) {
-  uint32_t nof = NumberOfElements();
-  if (nof == 0) return kNotFound;  // Bail out if empty.
-
-  uint32_t capacity = Capacity();
+  uint32_t mask = Capacity() - 1;
    uint32_t hash = Shape::Hash(key);
-  uint32_t entry = GetProbe(hash, 0, capacity);

-  Object* element = KeyAt(entry);
-  uint32_t passed_elements = 0;
-  if (!element->IsNull()) {
-    if (!element->IsUndefined() && Shape::IsMatch(key, element)) return  
entry;
-    if (++passed_elements == nof) return kNotFound;
+  // For the first probes rotate the hash to ensure a proper spread.
+  for (uint32_t i = 0; i < kNofFastProbes; i++) {
+    int entry = hash & mask;
+    Object* element = KeyAt(entry);
+    if (element->IsUndefined()) return kNotFound;
+    if (!element->IsNull() && Shape::IsMatch(key, element)) return entry;
+    hash = RotateRight(hash, kHashRotateShift);
    }
-  for (uint32_t i = 1; !element->IsUndefined(); i++) {
-    entry = GetProbe(hash, i, capacity);
-    element = KeyAt(entry);
-    if (!element->IsNull()) {
-      if (!element->IsUndefined() && Shape::IsMatch(key, element)) return  
entry;
-      if (++passed_elements == nof) return kNotFound;
-    }
+
+
+  // In the unlikely event that all fast probes missed, fall back to a linear scan.
+  for (uint32_t i = 1; i <= mask; i++) {
+    int entry = ++hash & mask;
+    Object* element = KeyAt(entry);
+    if (element->IsUndefined()) return kNotFound;
+    if (!element->IsNull() && Shape::IsMatch(key, element)) return entry;
    }
    return kNotFound;
  }
@@ -6579,15 +6579,23 @@

  template<typename Shape, typename Key>
  uint32_t HashTable<Shape, Key>::FindInsertionEntry(uint32_t hash) {
-  uint32_t capacity = Capacity();
-  uint32_t entry = GetProbe(hash, 0, capacity);
-  Object* element = KeyAt(entry);
-
-  for (uint32_t i = 1; !(element->IsUndefined() || element->IsNull());  
i++) {
-    entry = GetProbe(hash, i, capacity);
+  uint32_t mask = Capacity() - 1;
+  int entry;
+  Object* element;
+
+  // For the first probes rotate the hash to ensure a proper spread.
+  for (uint32_t i = 0; i < kNofFastProbes; i++) {
+    entry = hash & mask;
      element = KeyAt(entry);
+    if (element->IsUndefined() || element->IsNull()) return entry;
+    hash = RotateRight(hash, kHashRotateShift);
    }

+  do {
+    entry = ++hash & mask;
+    element = KeyAt(entry);
+  } while(!(element->IsUndefined() || element->IsNull()));
+
    return entry;
  }

@@ -6666,6 +6674,10 @@
  template
  int Dictionary<StringDictionaryShape, String*>::NumberOfEnumElements();

+template
+int HashTable<NumberDictionaryShape, uint32_t>::FindEntry(uint32_t key);
+
+
  // Collates undefined and unexisting elements below limit from position
  // zero of the elements. The object stays in Dictionary mode.
  Object* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
@@ -7021,7 +7033,7 @@
    uint32_t HashForObject(Object* obj) {
      FixedArray* symbols = FixedArray::cast(obj);
      int len = symbols->length();
-    uint32_t hash = 0;
+    uint32_t hash = 40617523; // In case the array is empty.
      for (int i = 0; i < len; i++) {
        hash ^= String::cast(symbols->get(i))->Hash();
      }

Modified: branches/bleeding_edge/src/objects.h
==============================================================================
--- branches/bleeding_edge/src/objects.h        (original)
+++ branches/bleeding_edge/src/objects.h        Thu Jul 16 05:56:50 2009
@@ -2051,11 +2051,6 @@
    // Casting.
    static inline HashTable* cast(Object* obj);

-  // Compute the probe offset (quadratic probing).
-  INLINE(static uint32_t GetProbeOffset(uint32_t n)) {
-    return (n + n * n) >> 1;
-  }
-
    static const int kNumberOfElementsIndex = 0;
    static const int kCapacityIndex         = 1;
    static const int kPrefixStartIndex      = 2;
@@ -2071,6 +2066,9 @@
    // Find entry for key otherwise return -1.
    int FindEntry(Key key);

+  static const uint32_t kNofFastProbes = 4;
+  static const uint32_t kHashRotateShift = 3;
+
   protected:

    // Find the entry at which to insert element with the given key that
@@ -2094,13 +2092,6 @@
      // and non-zero.
      ASSERT(capacity > 0);
      fast_set(this, kCapacityIndex, Smi::FromInt(capacity));
-  }
-
-
-  // Returns probe entry.
-  static uint32_t GetProbe(uint32_t hash, uint32_t number, uint32_t size) {
-    ASSERT(IsPowerOf2(size));
-    return (hash + GetProbeOffset(number)) & (size - 1);
    }

    // Ensure enough space for n additional elements.

Modified: branches/bleeding_edge/src/utils.h
==============================================================================
--- branches/bleeding_edge/src/utils.h  (original)
+++ branches/bleeding_edge/src/utils.h  Thu Jul 16 05:56:50 2009
@@ -212,6 +212,21 @@


  //  
----------------------------------------------------------------------------
+// Bitwise rotate word
+
+inline uint32_t RotateRight(uint32_t value, uint32_t n) {
+  ASSERT(n < 31);
+  return (value >> n) | (value << (32-n));
+}
+
+
+inline uint32_t RotateLeft(uint32_t value, uint32_t n) {
+  ASSERT(n < 31);
+  return (value << n) | (value >> (32-n));
+}
+
+
+//  
----------------------------------------------------------------------------
  // I/O support.

  // Our version of printf(). Avoids compilation errors that we get

--~--~---------~--~----~------------~-------~--~----~
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
-~----------~----~----~----~------~----~------~--~---

Reply via email to