Revision: 21774
Author:   [email protected]
Date:     Wed Jun 11 14:56:38 2014 UTC
Log:      Generate KeyedLoadGeneric with Hydrogen

[email protected]

Review URL: https://codereview.chromium.org/57123002
http://code.google.com/p/v8/source/detail?r=21774

Added:
 /branches/bleeding_edge/test/mjsunit/keyed-load-dictionary-stub.js
Modified:
 /branches/bleeding_edge/src/arm/code-stubs-arm.cc
 /branches/bleeding_edge/src/arm64/code-stubs-arm64.cc
 /branches/bleeding_edge/src/code-stubs-hydrogen.cc
 /branches/bleeding_edge/src/code-stubs.cc
 /branches/bleeding_edge/src/code-stubs.h
 /branches/bleeding_edge/src/elements-kind.cc
 /branches/bleeding_edge/src/field-index-inl.h
 /branches/bleeding_edge/src/field-index.h
 /branches/bleeding_edge/src/flag-definitions.h
 /branches/bleeding_edge/src/heap.h
 /branches/bleeding_edge/src/hydrogen-instructions.h
 /branches/bleeding_edge/src/hydrogen.cc
 /branches/bleeding_edge/src/hydrogen.h
 /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc
 /branches/bleeding_edge/src/ic.cc
 /branches/bleeding_edge/src/ic.h
 /branches/bleeding_edge/src/isolate.cc
 /branches/bleeding_edge/src/objects.h
 /branches/bleeding_edge/src/runtime.cc
 /branches/bleeding_edge/src/serialize.h
 /branches/bleeding_edge/src/x64/code-stubs-x64.cc

=======================================
--- /dev/null
+++ /branches/bleeding_edge/test/mjsunit/keyed-load-dictionary-stub.js Wed Jun 11 14:56:38 2014 UTC
@@ -0,0 +1,20 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax
+
+function generate_dictionary_array() {
+  var result = [0, 1, 2, 3, 4];
+  result[256 * 1024] = 5;
+  return result;
+}
+
+function get_accessor(a, i) {
+  return a[i];
+}
+
+var array1 = generate_dictionary_array();
+get_accessor(array1, 1);
+get_accessor(array1, 2);
+get_accessor(12345, 2);
=======================================
--- /branches/bleeding_edge/src/arm/code-stubs-arm.cc Wed Jun 11 06:59:25 2014 UTC
+++ /branches/bleeding_edge/src/arm/code-stubs-arm.cc Wed Jun 11 14:56:38 2014 UTC
@@ -116,6 +116,16 @@
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(Runtime::kHiddenRegExpConstructResult)->entry;
 }
+
+
+void KeyedLoadGenericElementStub::InitializeInterfaceDescriptor(
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { r1, r0 };
+  descriptor->register_param_count_ = 2;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      Runtime::FunctionForId(Runtime::kKeyedGetProperty)->entry;
+}


 void LoadFieldStub::InitializeInterfaceDescriptor(
=======================================
--- /branches/bleeding_edge/src/arm64/code-stubs-arm64.cc Tue Jun 3 08:12:43 2014 UTC
+++ /branches/bleeding_edge/src/arm64/code-stubs-arm64.cc Wed Jun 11 14:56:38 2014 UTC
@@ -99,6 +99,16 @@
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ = NULL;
 }
+
+
+void KeyedLoadGenericElementStub::InitializeInterfaceDescriptor(
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { x1, x0 };
+  descriptor->register_param_count_ = 2;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      Runtime::FunctionForId(Runtime::kKeyedGetProperty)->entry;
+}


 void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
=======================================
--- /branches/bleeding_edge/src/code-stubs-hydrogen.cc Tue Jun 10 14:01:08 2014 UTC
+++ /branches/bleeding_edge/src/code-stubs-hydrogen.cc Wed Jun 11 14:56:38 2014 UTC
@@ -1390,7 +1390,11 @@

   Add<HCheckSmi>(key);

-  return BuildUncheckedDictionaryElementLoad(receiver, key);
+  HValue* elements = AddLoadElements(receiver);
+
+  HValue* hash = BuildElementIndexHash(key);
+
+  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
 }


@@ -1415,6 +1419,306 @@
 Handle<Code> RegExpConstructResultStub::GenerateCode() {
   return DoGenerateCode(this);
 }
+
+
+template <>
+class CodeStubGraphBuilder<KeyedLoadGenericElementStub>
+  : public CodeStubGraphBuilderBase {
+ public:
+  CodeStubGraphBuilder(Isolate* isolate,
+                       KeyedLoadGenericElementStub* stub)
+    : CodeStubGraphBuilderBase(isolate, stub) {}
+
+ protected:
+  virtual HValue* BuildCodeStub();
+
+  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
+                                   HValue* bit_field2,
+                                   ElementsKind kind);
+
+  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
+                            HValue* receiver,
+                            HValue* key,
+                            HValue* instance_type,
+                            HValue* bit_field2,
+                            ElementsKind kind);
+
+  void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
+                                HValue* receiver,
+                                HValue* key,
+                                HValue* instance_type,
+                                HValue* bit_field2,
+                                ElementsKind kind);
+
+  KeyedLoadGenericElementStub* casted_stub() {
+    return static_cast<KeyedLoadGenericElementStub*>(stub());
+  }
+};
+
+
+void CodeStubGraphBuilder<
+  KeyedLoadGenericElementStub>::BuildElementsKindLimitCheck(
+    HGraphBuilder::IfBuilder* if_builder,
+    HValue* bit_field2,
+    ElementsKind kind) {
+  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
+  HValue* kind_limit = Add<HConstant>(
+      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));
+
+  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
+  if_builder->Then();
+}
+
+
+void CodeStubGraphBuilder<KeyedLoadGenericElementStub>::BuildFastElementLoad(
+    HGraphBuilder::IfBuilder* if_builder,
+    HValue* receiver,
+    HValue* key,
+    HValue* instance_type,
+    HValue* bit_field2,
+    ElementsKind kind) {
+  ASSERT(!IsExternalArrayElementsKind(kind));
+
+  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);
+
+  IfBuilder js_array_check(this);
+  js_array_check.If<HCompareNumericAndBranch>(
+      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
+  js_array_check.Then();
+  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
+                                              true, kind,
+                                              LOAD, NEVER_RETURN_HOLE,
+                                              STANDARD_STORE));
+  js_array_check.Else();
+  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
+                                              false, kind,
+                                              LOAD, NEVER_RETURN_HOLE,
+                                              STANDARD_STORE));
+  js_array_check.End();
+}
+
+
+void CodeStubGraphBuilder<
+  KeyedLoadGenericElementStub>::BuildExternalElementLoad(
+    HGraphBuilder::IfBuilder* if_builder,
+    HValue* receiver,
+    HValue* key,
+    HValue* instance_type,
+    HValue* bit_field2,
+    ElementsKind kind) {
+  ASSERT(IsExternalArrayElementsKind(kind));
+
+  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);
+
+  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
+                                              false, kind,
+                                              LOAD, NEVER_RETURN_HOLE,
+                                              STANDARD_STORE));
+}
+
+
+HValue* CodeStubGraphBuilder<KeyedLoadGenericElementStub>::BuildCodeStub() {
+  HValue* receiver = GetParameter(0);
+  HValue* key = GetParameter(1);
+
+  // Split into a smi/integer case and unique string case.
+  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
+                                                graph()->CreateBasicBlock());
+
+  BuildKeyedIndexCheck(key, &index_name_split_continuation);
+
+  IfBuilder index_name_split(this, &index_name_split_continuation);
+  index_name_split.Then();
+  {
+    // Key is an index (number)
+    key = Pop();
+
+    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
+      (1 << Map::kHasIndexedInterceptor);
+    BuildJSObjectCheck(receiver, bit_field_mask);
+
+    HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
+                                       HObjectAccess::ForMap());
+
+    HValue* instance_type =
+      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
+                           HObjectAccess::ForMapInstanceType());
+
+    HValue* bit_field2 = Add<HLoadNamedField>(map,
+                                              static_cast<HValue*>(NULL),
+                                              HObjectAccess::ForMapBitField2());
+
+    IfBuilder kind_if(this);
+    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                         FAST_HOLEY_ELEMENTS);
+
+    kind_if.Else();
+    {
+      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                           FAST_HOLEY_DOUBLE_ELEMENTS);
+    }
+    kind_if.Else();
+
+    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then"
+    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
+    {
+      HValue* elements = AddLoadElements(receiver);
+
+      HValue* hash = BuildElementIndexHash(key);
+
+      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
+    }
+    kind_if.Else();
+
+    // The SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then"
+    BuildElementsKindLimitCheck(&kind_if, bit_field2,
+                                SLOPPY_ARGUMENTS_ELEMENTS);
+    // Non-strict elements are not handled.
+    Add<HDeoptimize>("non-strict elements in KeyedLoadGenericElementStub",
+                     Deoptimizer::EAGER);
+    Push(graph()->GetConstant0());
+
+    kind_if.Else();
+    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                             EXTERNAL_INT8_ELEMENTS);
+
+    kind_if.Else();
+    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                             EXTERNAL_UINT8_ELEMENTS);
+
+    kind_if.Else();
+    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                             EXTERNAL_INT16_ELEMENTS);
+
+    kind_if.Else();
+    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                             EXTERNAL_UINT16_ELEMENTS);
+
+    kind_if.Else();
+    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                             EXTERNAL_INT32_ELEMENTS);
+
+    kind_if.Else();
+    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                             EXTERNAL_UINT32_ELEMENTS);
+
+    kind_if.Else();
+    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                             EXTERNAL_FLOAT32_ELEMENTS);
+
+    kind_if.Else();
+    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                             EXTERNAL_FLOAT64_ELEMENTS);
+
+    kind_if.Else();
+    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
+                             EXTERNAL_UINT8_CLAMPED_ELEMENTS);
+
+    kind_if.ElseDeopt("ElementsKind unhandled in KeyedLoadGenericElementStub");
+
+    kind_if.End();
+  }
+  index_name_split.Else();
+  {
+    // Key is a unique string.
+    key = Pop();
+
+    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
+        (1 << Map::kHasNamedInterceptor);
+    BuildJSObjectCheck(receiver, bit_field_mask);
+
+    HIfContinuation continuation;
+    BuildTestForDictionaryProperties(receiver, &continuation);
+    IfBuilder if_dict_properties(this, &continuation);
+    if_dict_properties.Then();
+    {
+      //  Key is string, properties are dictionary mode
+      BuildNonGlobalObjectCheck(receiver);
+
+      HValue* properties = Add<HLoadNamedField>(
+          receiver, static_cast<HValue*>(NULL),
+          HObjectAccess::ForPropertiesPointer());
+
+      HValue* hash =
+          Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
+          HObjectAccess::ForNameHashField());
+
+      HValue* value = BuildUncheckedDictionaryElementLoad(receiver,
+                                                          properties,
+                                                          key,
+                                                          hash);
+      Push(value);
+    }
+    if_dict_properties.Else();
+    {
+      //  Key is string, properties are fast mode
+      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);
+
+      ExternalReference cache_keys_ref =
+          ExternalReference::keyed_lookup_cache_keys(isolate());
+      HValue* cache_keys = Add<HConstant>(cache_keys_ref);
+
+      HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
+                                         HObjectAccess::ForMap());
+      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
+      base_index->ClearFlag(HValue::kCanOverflow);
+
+      IfBuilder lookup_if(this);
+      for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
+           ++probe) {
+        int probe_base = probe * KeyedLookupCache::kEntryLength;
+        HValue* map_index = AddUncasted<HAdd>(base_index,
+            Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
+        map_index->ClearFlag(HValue::kCanOverflow);
+        HValue* key_index = AddUncasted<HAdd>(base_index,
+            Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
+        key_index->ClearFlag(HValue::kCanOverflow);
+        HValue* map_to_check = Add<HLoadKeyed>(cache_keys,
+                                               map_index,
+                                               static_cast<HValue*>(NULL),
+                                               FAST_ELEMENTS,
+                                               NEVER_RETURN_HOLE, 0);
+        lookup_if.If<HCompareObjectEqAndBranch>(map_to_check, map);
+        lookup_if.And();
+        HValue* key_to_check = Add<HLoadKeyed>(cache_keys,
+                                               key_index,
+                                               static_cast<HValue*>(NULL),
+                                               FAST_ELEMENTS,
+                                               NEVER_RETURN_HOLE, 0);
+        lookup_if.If<HCompareObjectEqAndBranch>(key_to_check, key);
+        lookup_if.Then();
+        {
+          ExternalReference cache_field_offsets_ref =
+              ExternalReference::keyed_lookup_cache_field_offsets(isolate());
+          HValue* cache_field_offsets =
+              Add<HConstant>(cache_field_offsets_ref);
+          HValue* index = AddUncasted<HAdd>(hash,
+                                            Add<HConstant>(probe));
+          index->ClearFlag(HValue::kCanOverflow);
+          HValue* property_index = Add<HLoadKeyed>(cache_field_offsets,
+                                                   index,
+                                                   static_cast<HValue*>(NULL),
+                                                   EXTERNAL_INT32_ELEMENTS,
+                                                   NEVER_RETURN_HOLE, 0);
+          Push(property_index);
+        }
+        lookup_if.Else();
+      }
+      Add<HDeoptimize>("KeyedLoad fall-back", Deoptimizer::EAGER);
+      Push(graph()->GetConstant0());
+      lookup_if.End();
+      Push(Add<HLoadFieldByIndex>(receiver, Pop()));
+    }
+    if_dict_properties.End();
+  }
+  index_name_split.End();
+
+  return Pop();
+}
+
+
+Handle<Code> KeyedLoadGenericElementStub::GenerateCode() {
+  return DoGenerateCode(this);
+}


 } }  // namespace v8::internal
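
For readers following the fast-property path in BuildCodeStub above: before
deoptimizing, the stub probes the isolate's KeyedLookupCache, a small
(map, name) -> field-index cache, using the kEntriesPerBucket / kEntryLength /
kMapIndex / kKeyIndex constants this patch adds to src/heap.h. The following is
a hedged, self-contained C++ sketch of that probing scheme, not V8 source; the
Entry struct, the Lookup signature and the array types are stand-ins for
illustration only.

// Hedged sketch of the KeyedLookupCache probing done for fast-mode properties.
#include <cstdint>

static const int kEntriesPerBucket = 4;  // mirrors KeyedLookupCache::kEntriesPerBucket
static const int kNotFound = -1;

struct Entry {
  const void* map;  // checked against the receiver's map (kMapIndex slot)
  const void* key;  // checked against the name (kKeyIndex slot)
};

// 'keys' holds (map, key) pairs; 'field_offsets' is the parallel result table.
// 'hash' is assumed to be already masked to the cache capacity.
int Lookup(const Entry* keys, const int* field_offsets,
           uint32_t hash, const void* map, const void* key) {
  for (int probe = 0; probe < kEntriesPerBucket; ++probe) {
    uint32_t index = hash + probe;
    if (keys[index].map == map && keys[index].key == key) {
      return field_offsets[index];  // encoded field index fed to HLoadFieldByIndex
    }
  }
  return kNotFound;  // the generated stub deoptimizes here ("KeyedLoad fall-back")
}

int main() {
  int map_dummy = 0, key_dummy = 0;
  Entry keys[8] = {};
  int offsets[8] = {};
  keys[3].map = &map_dummy;
  keys[3].key = &key_dummy;
  offsets[3] = 42;
  return Lookup(keys, offsets, 2, &map_dummy, &key_dummy) == 42 ? 0 : 1;
}
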
=======================================
--- /branches/bleeding_edge/src/code-stubs.cc   Tue Jun  3 08:12:43 2014 UTC
+++ /branches/bleeding_edge/src/code-stubs.cc   Wed Jun 11 14:56:38 2014 UTC
@@ -762,6 +762,13 @@
   RegExpConstructResultStub stub(isolate);
   InstallDescriptor(isolate, &stub);
 }
+
+
+// static
+void KeyedLoadGenericElementStub::InstallDescriptors(Isolate* isolate) {
+  KeyedLoadGenericElementStub stub(isolate);
+  InstallDescriptor(isolate, &stub);
+}


 ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate)
=======================================
--- /branches/bleeding_edge/src/code-stubs.h    Tue Jun 10 14:01:08 2014 UTC
+++ /branches/bleeding_edge/src/code-stubs.h    Wed Jun 11 14:56:38 2014 UTC
@@ -53,6 +53,7 @@
   V(CEntry)                              \
   V(JSEntry)                             \
   V(KeyedLoadElement)                    \
+  V(KeyedLoadGeneric)                    \
   V(ArrayNoArgumentConstructor)          \
   V(ArraySingleArgumentConstructor)      \
   V(ArrayNArgumentsConstructor)          \
@@ -1845,6 +1846,29 @@
 };


+class KeyedLoadGenericElementStub : public HydrogenCodeStub {
+ public:
+  explicit KeyedLoadGenericElementStub(Isolate *isolate)
+      : HydrogenCodeStub(isolate) {}
+
+  virtual Handle<Code> GenerateCode() V8_OVERRIDE;
+
+  virtual void InitializeInterfaceDescriptor(
+      CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE;
+
+  static void InstallDescriptors(Isolate* isolate);
+
+  virtual Code::Kind GetCodeKind() const { return Code::KEYED_LOAD_IC; }
+  virtual InlineCacheState GetICState() { return GENERIC; }
+
+ private:
+  Major MajorKey() { return KeyedLoadGeneric; }
+  int NotMissMinorKey() { return 0; }
+
+  DISALLOW_COPY_AND_ASSIGN(KeyedLoadGenericElementStub);
+};
+
+
 class DoubleToIStub : public PlatformCodeStub {
  public:
   DoubleToIStub(Isolate* isolate,
=======================================
--- /branches/bleeding_edge/src/elements-kind.cc Thu Jun 5 12:14:47 2014 UTC
+++ /branches/bleeding_edge/src/elements-kind.cc Wed Jun 11 14:56:38 2014 UTC
@@ -53,8 +53,9 @@


 int GetDefaultHeaderSizeForElementsKind(ElementsKind elements_kind) {
+  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
   return IsExternalArrayElementsKind(elements_kind)
-      ? 0 : (FixedArray::kHeaderSize - kSmiTagSize);
+      ? 0 : (FixedArray::kHeaderSize - kHeapObjectTag);
 }


=======================================
--- /branches/bleeding_edge/src/field-index-inl.h Tue Jun 10 14:01:08 2014 UTC
+++ /branches/bleeding_edge/src/field-index-inl.h Wed Jun 11 14:56:38 2014 UTC
@@ -73,6 +73,24 @@
   return ForPropertyIndex(map, field_index,
                           details.representation().IsDouble());
 }
+
+
+inline FieldIndex FieldIndex::ForKeyedLookupCacheIndex(Map* map, int index) {
+  if (FLAG_compiled_keyed_generic_loads) {
+    return ForLoadByFieldIndex(map, index);
+  } else {
+    return ForPropertyIndex(map, index);
+  }
+}
+
+
+inline int FieldIndex::GetKeyedLookupCacheIndex() const {
+  if (FLAG_compiled_keyed_generic_loads) {
+    return GetLoadByFieldIndex();
+  } else {
+    return property_index();
+  }
+}


 } }  // namespace v8::internal
=======================================
--- /branches/bleeding_edge/src/field-index.h   Tue Jun 10 14:01:08 2014 UTC
+++ /branches/bleeding_edge/src/field-index.h   Wed Jun 11 14:56:38 2014 UTC
@@ -26,9 +26,7 @@
   static FieldIndex ForLookupResult(const LookupResult* result);
   static FieldIndex ForDescriptor(Map* map, int descriptor_index);
   static FieldIndex ForLoadByFieldIndex(Map* map, int index);
-  static FieldIndex ForKeyedLookupCacheIndex(Map* map, int index) {
-    return ForPropertyIndex(map, index);
-  }
+  static FieldIndex ForKeyedLookupCacheIndex(Map* map, int index);

   bool is_inobject() const {
     return IsInObjectBits::decode(bit_field_);
@@ -75,9 +73,7 @@
     return is_double() ? (result | 1) : result;
   }

-  int GetKeyedLookupCacheIndex() const {
-    return property_index();
-  }
+  int GetKeyedLookupCacheIndex() const;

   int GetLoadFieldStubKey() const {
     return bit_field_ &
=======================================
--- /branches/bleeding_edge/src/flag-definitions.h Tue Jun 10 10:51:33 2014 UTC
+++ /branches/bleeding_edge/src/flag-definitions.h Wed Jun 11 14:56:38 2014 UTC
@@ -190,6 +190,8 @@
 DEFINE_bool(smi_only_arrays, true, "tracks arrays with only smi values")
 DEFINE_bool(compiled_keyed_dictionary_loads, true,
"use optimizing compiler to generate keyed dictionary load stubs")
+DEFINE_bool(compiled_keyed_generic_loads, false,
+            "use optimizing compiler to generate keyed generic load stubs")
 DEFINE_bool(clever_optimizations, true,
             "Optimize object size, Array shift, DOM strings and string +")
 // TODO(hpayer): We will remove this flag as soon as we have pretenuring
=======================================
--- /branches/bleeding_edge/src/heap.h  Thu Jun  5 17:08:21 2014 UTC
+++ /branches/bleeding_edge/src/heap.h  Wed Jun 11 14:56:38 2014 UTC
@@ -2404,6 +2404,9 @@
   static const int kMapHashShift = 5;
   static const int kHashMask = -4;  // Zero the last two bits.
   static const int kEntriesPerBucket = 4;
+  static const int kEntryLength = 2;
+  static const int kMapIndex = 0;
+  static const int kKeyIndex = 1;
   static const int kNotFound = -1;

   // kEntriesPerBucket should be a power of 2.
=======================================
--- /branches/bleeding_edge/src/hydrogen-instructions.h Fri Jun 6 13:16:24 2014 UTC
+++ /branches/bleeding_edge/src/hydrogen-instructions.h Wed Jun 11 14:56:38 2014 UTC
@@ -6048,6 +6048,23 @@
   static HObjectAccess ForMap() {
     return HObjectAccess(kMaps, JSObject::kMapOffset);
   }
+
+  static HObjectAccess ForMapAsInteger32() {
+    return HObjectAccess(kMaps, JSObject::kMapOffset,
+                         Representation::Integer32());
+  }
+
+  static HObjectAccess ForMapInObjectProperties() {
+    return HObjectAccess(kInobject,
+                         Map::kInObjectPropertiesOffset,
+                         Representation::UInteger8());
+  }
+
+  static HObjectAccess ForMapInstanceType() {
+    return HObjectAccess(kInobject,
+                         Map::kInstanceTypeOffset,
+                         Representation::UInteger8());
+  }

   static HObjectAccess ForMapInstanceSize() {
     return HObjectAccess(kInobject,
@@ -6055,10 +6072,30 @@
                          Representation::UInteger8());
   }

-  static HObjectAccess ForMapInstanceType() {
+  static HObjectAccess ForMapBitField() {
+    return HObjectAccess(kInobject,
+                         Map::kBitFieldOffset,
+                         Representation::UInteger8());
+  }
+
+  static HObjectAccess ForMapBitField2() {
+    return HObjectAccess(kInobject,
+                         Map::kBitField2Offset,
+                         Representation::UInteger8());
+  }
+
+  static HObjectAccess ForNameHashField() {
+    return HObjectAccess(kInobject,
+                         Name::kHashFieldOffset,
+                         Representation::Integer32());
+  }
+
+  static HObjectAccess ForMapInstanceTypeAndBitField() {
+    STATIC_ASSERT((Map::kInstanceTypeOffset & 1) == 0);
+    STATIC_ASSERT(Map::kBitFieldOffset == Map::kInstanceTypeOffset + 1);
     return HObjectAccess(kInobject,
                          Map::kInstanceTypeOffset,
-                         Representation::UInteger8());
+                         Representation::UInteger16());
   }

   static HObjectAccess ForPropertyCellValue() {
@@ -6453,6 +6490,10 @@
   bool HasDependency() const { return OperandAt(0) != OperandAt(2); }
   uint32_t base_offset() { return BaseOffsetField::decode(bit_field_); }
   void IncreaseBaseOffset(uint32_t base_offset) {
+    // The base offset is usually simply the size of the array header, except
+    // when dehoisting adds an additional offset due to an array index key
+    // manipulation, in which case it becomes (array header size +
+    // constant-offset-from-key * kPointerSize).
     base_offset += BaseOffsetField::decode(bit_field_);
     bit_field_ = BaseOffsetField::update(bit_field_, base_offset);
   }
@@ -6465,7 +6506,7 @@
   void SetDehoisted(bool is_dehoisted) {
     bit_field_ = IsDehoistedField::update(bit_field_, is_dehoisted);
   }
-  ElementsKind elements_kind() const {
+  virtual ElementsKind elements_kind() const V8_OVERRIDE {
     return ElementsKindField::decode(bit_field_);
   }
   LoadKeyedHoleMode hole_mode() const {
@@ -6923,6 +6964,10 @@
   ElementsKind elements_kind() const { return elements_kind_; }
   uint32_t base_offset() { return base_offset_; }
   void IncreaseBaseOffset(uint32_t base_offset) {
+    // The base offset is usually simply the size of the array header, except
+    // when dehoisting adds an additional offset due to an array index key
+    // manipulation, in which case it becomes (array header size +
+    // constant-offset-from-key * kPointerSize).
     base_offset_ += base_offset;
   }
   virtual int MaxBaseOffsetBits() {
=======================================
--- /branches/bleeding_edge/src/hydrogen.cc     Tue Jun 10 09:01:45 2014 UTC
+++ /branches/bleeding_edge/src/hydrogen.cc     Wed Jun 11 14:56:38 2014 UTC
@@ -1228,6 +1228,16 @@
   header->AttachLoopInformation();
   return header;
 }
+
+
+HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
+  HValue* map = Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
+                                     HObjectAccess::ForMap());
+
+  HValue* bit_field2 = Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
+                                            HObjectAccess::ForMapBitField2());
+  return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
+}


 HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
@@ -1397,6 +1407,194 @@

   Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
 }
+
+
+void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
+                                       int bit_field_mask) {
+  // Check that the object isn't a smi.
+  Add<HCheckHeapObject>(receiver);
+
+  // Get the map of the receiver.
+  HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
+                                     HObjectAccess::ForMap());
+
+  // Check the instance type and if an access check is needed, this can be
+  // done with a single load, since both bytes are adjacent in the map.
+  HObjectAccess access(HObjectAccess::ForMapInstanceTypeAndBitField());
+  HValue* instance_type_and_bit_field =
+      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL), access);
+
+  HValue* mask = Add<HConstant>(0x00FF | (bit_field_mask << 8));
+  HValue* and_result = AddUncasted<HBitwise>(Token::BIT_AND,
+                                             instance_type_and_bit_field,
+                                             mask);
+  HValue* sub_result = AddUncasted<HSub>(and_result,
+                                         Add<HConstant>(JS_OBJECT_TYPE));
+  Add<HBoundsCheck>(sub_result, Add<HConstant>(0x100 - JS_OBJECT_TYPE));
+}
+
+
+void HGraphBuilder::BuildKeyedIndexCheck(HValue* key,
+                                          HIfContinuation* join_continuation) {
+  // The sometimes unintuitively backward ordering of the ifs below is
+  // convoluted, but necessary.  All of the paths must guarantee that the
+  // if-true of the continuation returns a smi element index and the if-false
+  // of the continuation returns either a symbol or a unique string key. All
+  // other object types cause a deopt to fall back to the runtime.
+
+  IfBuilder key_smi_if(this);
+  key_smi_if.If<HIsSmiAndBranch>(key);
+  key_smi_if.Then();
+  {
+    Push(key);  // Nothing to do, just continue to true of continuation.
+  }
+  key_smi_if.Else();
+  {
+    HValue* map = Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
+                                       HObjectAccess::ForMap());
+    HValue* instance_type =
+        Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
+                             HObjectAccess::ForMapInstanceType());
+
+    // Non-unique string, check for a string with a hash code that is actually
+    // an index.
+    STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
+    IfBuilder not_string_or_name_if(this);
+    not_string_or_name_if.If<HCompareNumericAndBranch>(
+        instance_type,
+        Add<HConstant>(LAST_UNIQUE_NAME_TYPE),
+        Token::GT);
+
+    not_string_or_name_if.Then();
+    {
+      // Non-smi, non-Name, non-String: Try to convert to smi in case of
+      // HeapNumber.
+      // TODO(danno): This could call some variant of ToString
+      Push(AddUncasted<HForceRepresentation>(key, Representation::Smi()));
+    }
+    not_string_or_name_if.Else();
+    {
+      // String or Name: check explicitly for Name, they can short-circuit
+      // directly to unique non-index key path.
+      IfBuilder not_symbol_if(this);
+      not_symbol_if.If<HCompareNumericAndBranch>(
+          instance_type,
+          Add<HConstant>(SYMBOL_TYPE),
+          Token::NE);
+
+      not_symbol_if.Then();
+      {
+        // String: check whether the String is a String of an index. If it is,
+        // extract the index value from the hash.
+        HValue* hash =
+            Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
+                                 HObjectAccess::ForNameHashField());
+        HValue* not_index_mask = Add<HConstant>(static_cast<int>(
+            String::kContainsCachedArrayIndexMask));
+
+        HValue* not_index_test = AddUncasted<HBitwise>(
+            Token::BIT_AND, hash, not_index_mask);
+
+        IfBuilder string_index_if(this);
+        string_index_if.If<HCompareNumericAndBranch>(not_index_test,
+                                                     graph()->GetConstant0(),
+                                                     Token::EQ);
+        string_index_if.Then();
+        {
+          // String with index in hash: extract the index and merge to index path.
+          Push(BuildDecodeField<String::ArrayIndexValueBits>(hash));
+        }
+        string_index_if.Else();
+        {
+          // Key is a non-index String, check for uniqueness/internalization. If
+          // it's not, deopt.
+          HValue* not_internalized_bit = AddUncasted<HBitwise>(
+              Token::BIT_AND,
+              instance_type,
+              Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
+          DeoptimizeIf<HCompareNumericAndBranch>(
+              not_internalized_bit,
+              graph()->GetConstant0(),
+              Token::NE,
+              "BuildKeyedIndexCheck: string isn't internalized");
+          // Key guaranteed to be a unique string
+          Push(key);
+        }
+        string_index_if.JoinContinuation(join_continuation);
+      }
+      not_symbol_if.Else();
+      {
+        Push(key);  // Key is symbol
+      }
+      not_symbol_if.JoinContinuation(join_continuation);
+    }
+    not_string_or_name_if.JoinContinuation(join_continuation);
+  }
+  key_smi_if.JoinContinuation(join_continuation);
+}
+
+
+void HGraphBuilder::BuildNonGlobalObjectCheck(HValue* receiver) {
+  // Get the instance type of the receiver, and make sure that it is
+  // not one of the global object types.
+  HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
+                                     HObjectAccess::ForMap());
+  HValue* instance_type =
+    Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
+                         HObjectAccess::ForMapInstanceType());
+  STATIC_ASSERT(JS_BUILTINS_OBJECT_TYPE == JS_GLOBAL_OBJECT_TYPE + 1);
+  HValue* min_global_type = Add<HConstant>(JS_GLOBAL_OBJECT_TYPE);
+  HValue* max_global_type = Add<HConstant>(JS_BUILTINS_OBJECT_TYPE);
+
+  IfBuilder if_global_object(this);
+  if_global_object.If<HCompareNumericAndBranch>(instance_type,
+                                                max_global_type,
+                                                Token::LTE);
+  if_global_object.And();
+  if_global_object.If<HCompareNumericAndBranch>(instance_type,
+                                                min_global_type,
+                                                Token::GTE);
+  if_global_object.ThenDeopt("receiver was a global object");
+  if_global_object.End();
+}
+
+
+void HGraphBuilder::BuildTestForDictionaryProperties(
+    HValue* object,
+    HIfContinuation* continuation) {
+  HValue* properties = Add<HLoadNamedField>(
+      object, static_cast<HValue*>(NULL),
+      HObjectAccess::ForPropertiesPointer());
+  HValue* properties_map =
+      Add<HLoadNamedField>(properties, static_cast<HValue*>(NULL),
+                           HObjectAccess::ForMap());
+  HValue* hash_map = Add<HLoadRoot>(Heap::kHashTableMapRootIndex);
+  IfBuilder builder(this);
+  builder.If<HCompareObjectEqAndBranch>(properties_map, hash_map);
+  builder.CaptureContinuation(continuation);
+}
+
+
+HValue* HGraphBuilder::BuildKeyedLookupCacheHash(HValue* object,
+                                                 HValue* key) {
+  // Load the map of the receiver, compute the keyed lookup cache hash
+  // based on 32 bits of the map pointer and the string hash.
+  HValue* object_map =
+      Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
+                           HObjectAccess::ForMapAsInteger32());
+  HValue* shifted_map = AddUncasted<HShr>(
+      object_map, Add<HConstant>(KeyedLookupCache::kMapHashShift));
+  HValue* string_hash =
+      Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
+                           HObjectAccess::ForStringHashField());
+  HValue* shifted_hash = AddUncasted<HShr>(
+      string_hash, Add<HConstant>(String::kHashShift));
+  HValue* xor_result = AddUncasted<HBitwise>(Token::BIT_XOR, shifted_map,
+                                             shifted_hash);
+  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
+  return AddUncasted<HBitwise>(Token::BIT_AND, xor_result,
+                               Add<HConstant>(mask));
+}


 HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoadHelper(
@@ -1511,11 +1709,9 @@


HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
-                                                           HValue* key) {
-  HValue* elements = AddLoadElements(receiver);
-
-  HValue* hash = BuildElementIndexHash(key);
-
+ HValue* elements,
+                                                           HValue* key,
+                                                           HValue* hash) {
   HValue* capacity = Add<HLoadKeyed>(
       elements,
       Add<HConstant>(NameDictionary::kCapacityIndex),
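
The BuildJSObjectCheck helper above leans on the Map byte reordering made in
objects.h further down in this patch: with the instance type and the bit field
adjacent, both can be fetched with one 16-bit load and folded into a single
masked range check. Below is a hedged scalar sketch of that check, not V8
source; the numeric value used for JS_OBJECT_TYPE and the bit positions in
bit_field_mask are illustrative stand-ins.

#include <cstdint>

// On little-endian, a 16-bit load at kInstanceTypeOffset yields the instance
// type in the low byte and the bit field in the high byte.
const uint32_t kJSObjectType = 0xA0;  // stand-in for JS_OBJECT_TYPE

bool PassesJSObjectCheck(uint16_t instance_type_and_bit_field,
                         uint32_t bit_field_mask) {
  // Keep the whole instance-type byte plus only the bit-field bits we reject.
  uint32_t masked =
      instance_type_and_bit_field & (0x00FF | (bit_field_mask << 8));
  // Passes iff none of the rejected bit-field bits is set (masked < 0x100) and
  // the instance type is >= JS_OBJECT_TYPE; one unsigned compare covers both,
  // which is what the HBoundsCheck emitted by the builder expresses.
  return masked - kJSObjectType < 0x100 - kJSObjectType;
}

int main() {
  // Example: instance type 0xA1, no access-check or interceptor bits set.
  return PassesJSObjectCheck(0x00A1, (1u << 3) | (1u << 5)) ? 0 : 1;
}
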
=======================================
--- /branches/bleeding_edge/src/hydrogen.h      Thu Jun  5 07:33:01 2014 UTC
+++ /branches/bleeding_edge/src/hydrogen.h      Wed Jun 11 14:56:38 2014 UTC
@@ -1309,6 +1309,16 @@
   HBasicBlock* CreateBasicBlock(HEnvironment* env);
   HBasicBlock* CreateLoopHeaderBlock();

+  template <class BitFieldClass>
+  HValue* BuildDecodeField(HValue* encoded_field) {
+    HValue* shifted_field = AddUncasted<HShr>(encoded_field,
+        Add<HConstant>(static_cast<int>(BitFieldClass::kShift)));
+    HValue* mask_value = Add<HConstant>(static_cast<int>(BitFieldClass::kMask));
+    return AddUncasted<HBitwise>(Token::BIT_AND, shifted_field, mask_value);
+  }
+
+  HValue* BuildGetElementsKind(HValue* object);
+
   HValue* BuildCheckHeapObject(HValue* object);
   HValue* BuildCheckString(HValue* string);
   HValue* BuildWrapReceiver(HValue* object, HValue* function);
@@ -1335,8 +1345,32 @@

   HValue* BuildNumberToString(HValue* object, Type* type);

+  void BuildJSObjectCheck(HValue* receiver,
+                          int bit_field_mask);
+
+  // Checks a key value that's being used in a keyed element access context.
+  // If the key is an index, i.e. a smi, a number, or a unique string with a
+  // cached numeric value, the "true" branch of the continuation is joined.
+  // Otherwise, if the key is a symbol or a unique string, the "false" branch
+  // of the continuation is joined. All other key types trigger a
+  // deoptimization. In both paths of the continuation, the key is pushed on
+  // the top of the environment.
+  void BuildKeyedIndexCheck(HValue* key,
+                            HIfContinuation* join_continuation);
+
+  // Checks whether an object's properties are in dictionary mode, in which
+  // case the "true" branch of the continuation is taken, otherwise the
+  // "false" branch.
+  void BuildTestForDictionaryProperties(HValue* object,
+                                        HIfContinuation* continuation);
+
+  void BuildNonGlobalObjectCheck(HValue* receiver);
+
+  HValue* BuildKeyedLookupCacheHash(HValue* object,
+                                    HValue* key);
+
   HValue* BuildUncheckedDictionaryElementLoad(HValue* receiver,
-                                              HValue* key);
+                                              HValue* elements,
+                                              HValue* key,
+                                              HValue* hash);

   HValue* BuildRegExpConstructResult(HValue* length,
                                      HValue* index,
@@ -1673,6 +1707,27 @@
     Direction direction_;
     bool finished_;
   };
+
+  template <class A, class P1>
+  void DeoptimizeIf(P1 p1, const char* reason) {
+    IfBuilder builder(this);
+    builder.If<A>(p1);
+    builder.ThenDeopt(reason);
+  }
+
+  template <class A, class P1, class P2>
+  void DeoptimizeIf(P1 p1, P2 p2, const char* reason) {
+    IfBuilder builder(this);
+    builder.If<A>(p1, p2);
+    builder.ThenDeopt(reason);
+  }
+
+  template <class A, class P1, class P2, class P3>
+  void DeoptimizeIf(P1 p1, P2 p2, P3 p3, const char* reason) {
+    IfBuilder builder(this);
+    builder.If<A>(p1, p2, p3);
+    builder.ThenDeopt(reason);
+  }

   HValue* BuildNewElementsCapacity(HValue* old_capacity);

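
BuildKeyedLookupCacheHash, declared above and defined in hydrogen.cc, emits as
Hydrogen instructions the same hash the runtime-side KeyedLookupCache uses.
Here is a hedged scalar sketch of that computation, not V8 source; the shift
and mask parameters stand in for KeyedLookupCache::kMapHashShift,
String::kHashShift and (kCapacityMask & kHashMask).

#include <cstdint>

// XOR of the shifted low 32 bits of the receiver's map pointer with the
// shifted string hash, masked down to a cache bucket index.
uint32_t KeyedLookupCacheHash(uint32_t map_word, uint32_t name_hash_field,
                              int map_hash_shift, int string_hash_shift,
                              uint32_t bucket_mask) {
  uint32_t shifted_map = map_word >> map_hash_shift;
  uint32_t shifted_hash = name_hash_field >> string_hash_shift;
  return (shifted_map ^ shifted_hash) & bucket_mask;
}

int main() {
  // Example only: arbitrary inputs; a shift of 5 mirrors kMapHashShift from
  // heap.h, the other values are illustrative.
  uint32_t bucket = KeyedLookupCacheHash(0xDEADBEEF, 0x1234ABC0, 5, 2, 0xFC);
  return bucket <= 0xFC ? 0 : 1;
}
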
=======================================
--- /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Wed Jun 11 06:59:25 2014 UTC
+++ /branches/bleeding_edge/src/ia32/code-stubs-ia32.cc Wed Jun 11 14:56:38 2014 UTC
@@ -121,6 +121,16 @@
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(Runtime::kHiddenRegExpConstructResult)->entry;
 }
+
+
+void KeyedLoadGenericElementStub::InitializeInterfaceDescriptor(
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { edx, ecx };
+  descriptor->register_param_count_ = 2;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      Runtime::FunctionForId(Runtime::kKeyedGetProperty)->entry;
+}


 void LoadFieldStub::InitializeInterfaceDescriptor(
=======================================
--- /branches/bleeding_edge/src/ic.cc   Wed Jun 11 09:59:14 2014 UTC
+++ /branches/bleeding_edge/src/ic.cc   Wed Jun 11 14:56:38 2014 UTC
@@ -552,6 +552,23 @@
   SetTargetAtAddress(address, GetRawUninitialized(isolate, op), constant_pool);
   PatchInlinedSmiCode(address, DISABLE_INLINED_SMI_CHECK);
 }
+
+
+Handle<Code> KeyedLoadIC::megamorphic_stub() {
+  if (FLAG_compiled_keyed_generic_loads) {
+    return KeyedLoadGenericElementStub(isolate()).GetCode();
+  } else {
+    return isolate()->builtins()->KeyedLoadIC_Generic();
+  }
+}
+
+Handle<Code> KeyedLoadIC::generic_stub() const {
+  if (FLAG_compiled_keyed_generic_loads) {
+    return KeyedLoadGenericElementStub(isolate()).GetCode();
+  } else {
+    return isolate()->builtins()->KeyedLoadIC_Generic();
+  }
+}


 static bool MigrateDeprecated(Handle<Object> object) {
=======================================
--- /branches/bleeding_edge/src/ic.h    Wed Jun 11 09:59:14 2014 UTC
+++ /branches/bleeding_edge/src/ic.h    Wed Jun 11 14:56:38 2014 UTC
@@ -525,12 +525,9 @@

   Handle<Code> LoadElementStub(Handle<JSObject> receiver);

-  virtual Handle<Code> megamorphic_stub() {
-    return isolate()->builtins()->KeyedLoadIC_Generic();
-  }
-  virtual Handle<Code> generic_stub() const {
-    return isolate()->builtins()->KeyedLoadIC_Generic();
-  }
+  virtual Handle<Code> megamorphic_stub();
+  virtual Handle<Code> generic_stub() const;
+
   virtual Handle<Code> slow_stub() const {
     return isolate()->builtins()->KeyedLoadIC_Slow();
   }
=======================================
--- /branches/bleeding_edge/src/isolate.cc      Wed Jun 11 05:48:33 2014 UTC
+++ /branches/bleeding_edge/src/isolate.cc      Wed Jun 11 14:56:38 2014 UTC
@@ -2011,6 +2011,7 @@
     NumberToStringStub::InstallDescriptors(this);
     StringAddStub::InstallDescriptors(this);
     RegExpConstructResultStub::InstallDescriptors(this);
+    KeyedLoadGenericElementStub::InstallDescriptors(this);
   }

   CallDescriptors::InitializeForIsolate(this);
=======================================
--- /branches/bleeding_edge/src/objects.h       Wed Jun 11 09:59:14 2014 UTC
+++ /branches/bleeding_edge/src/objects.h       Wed Jun 11 14:56:38 2014 UTC
@@ -6723,10 +6723,18 @@
   static const int kVisitorIdOffset = kInstanceSizesOffset + kVisitorIdByte;

   // Byte offsets within kInstanceAttributesOffset attributes.
+#if V8_TARGET_LITTLE_ENDIAN
+  // Order instance type and bit field together such that they can be loaded
+  // together as a 16-bit word with instance type in the lower 8 bits
+  // regardless of endianness.
   static const int kInstanceTypeOffset = kInstanceAttributesOffset + 0;
-  static const int kUnusedPropertyFieldsOffset = kInstanceAttributesOffset + 1;
-  static const int kBitFieldOffset = kInstanceAttributesOffset + 2;
-  static const int kBitField2Offset = kInstanceAttributesOffset + 3;
+  static const int kBitFieldOffset = kInstanceAttributesOffset + 1;
+#else
+  static const int kBitFieldOffset = kInstanceAttributesOffset + 0;
+  static const int kInstanceTypeOffset = kInstanceAttributesOffset + 1;
+#endif
+  static const int kBitField2Offset = kInstanceAttributesOffset + 2;
+  static const int kUnusedPropertyFieldsOffset = kInstanceAttributesOffset + 3;

   STATIC_ASSERT(kInstanceTypeOffset == Internals::kMapInstanceTypeOffset);

@@ -9048,6 +9056,33 @@
  public:
   enum Encoding { ONE_BYTE_ENCODING, TWO_BYTE_ENCODING };

+  // Array index strings this short can keep their index in the hash field.
+  static const int kMaxCachedArrayIndexLength = 7;
+
+  // For strings which are array indexes the hash value has the string length
+  // mixed into the hash, mainly to avoid a hash value of zero which would be
+  // the case for the string '0'. 24 bits are used for the array index value.
+  static const int kArrayIndexValueBits = 24;
+  static const int kArrayIndexLengthBits =
+      kBitsPerInt - kArrayIndexValueBits - kNofHashBitFields;
+
+  STATIC_ASSERT((kArrayIndexLengthBits > 0));
+
+  class ArrayIndexValueBits : public BitField<unsigned int, kNofHashBitFields,
+      kArrayIndexValueBits> {};  // NOLINT
+  class ArrayIndexLengthBits : public BitField<unsigned int,
+      kNofHashBitFields + kArrayIndexValueBits,
+      kArrayIndexLengthBits> {};  // NOLINT
+
+  // Check that kMaxCachedArrayIndexLength + 1 is a power of two so we can
+  // use a mask to test if the length of the string is less than or equal
+  // to kMaxCachedArrayIndexLength.
+  STATIC_ASSERT(IS_POWER_OF_TWO(kMaxCachedArrayIndexLength + 1));
+
+  static const unsigned int kContainsCachedArrayIndexMask =
+      (~kMaxCachedArrayIndexLength << ArrayIndexLengthBits::kShift) |
+      kIsNotArrayIndexMask;
+
   // Representation of the flat content of a String.
   // A non-flat string doesn't have flat content.
   // A flat string has content that's encoded as a sequence of either
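
The String constants added above describe how short array-index strings cache
their numeric value directly in the hash field: the index occupies
kArrayIndexValueBits (24) bits starting right after the hash flag bits, the
string length is packed above it, and kContainsCachedArrayIndexMask tests for
that case with a single AND. A hedged sketch of that packing, not V8 source;
the number of low flag bits is passed as a parameter instead of reproducing
kNofHashBitFields.

#include <cstdint>

const int kArrayIndexValueBits = 24;  // matches the constant added above

// Pack a cached array index and the string length into a hash-field-shaped
// word; the low 'flag_bits' bits are left zero here for simplicity.
uint32_t EncodeCachedIndex(uint32_t index, uint32_t length, int flag_bits) {
  return (index << flag_bits) |
         (length << (flag_bits + kArrayIndexValueBits));
}

// Recover the cached index, i.e. the value the generated stub extracts via
// String::ArrayIndexValueBits in BuildKeyedIndexCheck.
uint32_t DecodeCachedIndex(uint32_t hash_field, int flag_bits) {
  return (hash_field >> flag_bits) & ((1u << kArrayIndexValueBits) - 1);
}

int main() {
  // Example: the string "42" (length 2) caches the index 42 in its hash field.
  uint32_t hash_field = EncodeCachedIndex(42, 2, 2);
  return DecodeCachedIndex(hash_field, 2) == 42 ? 0 : 1;
}
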
=======================================
--- /branches/bleeding_edge/src/runtime.cc      Wed Jun 11 09:59:14 2014 UTC
+++ /branches/bleeding_edge/src/runtime.cc      Wed Jun 11 14:56:38 2014 UTC
@@ -5560,16 +5560,17 @@
     HeapNumber* number = HeapNumber::cast(*value);
     double_array->set(store_index, number->Number());
   } else {
-    ASSERT(IsFastSmiElementsKind(elements_kind) ||
-           IsFastDoubleElementsKind(elements_kind));
-    ElementsKind transitioned_kind = IsFastHoleyElementsKind(elements_kind)
-        ? FAST_HOLEY_ELEMENTS
-        : FAST_ELEMENTS;
-    JSObject::TransitionElementsKind(object, transitioned_kind);
-    if (IsMoreGeneralElementsKindTransition(
-            boilerplate_object->GetElementsKind(),
-            transitioned_kind)) {
-      JSObject::TransitionElementsKind(boilerplate_object, transitioned_kind);
+    if (!IsFastObjectElementsKind(elements_kind)) {
+      ElementsKind transitioned_kind = IsFastHoleyElementsKind(elements_kind)
+          ? FAST_HOLEY_ELEMENTS
+          : FAST_ELEMENTS;
+      JSObject::TransitionElementsKind(object, transitioned_kind);
+      ElementsKind boilerplate_elements_kind =
+          boilerplate_object->GetElementsKind();
+      if (IsMoreGeneralElementsKindTransition(boilerplate_elements_kind,
+                                              transitioned_kind)) {
+        JSObject::TransitionElementsKind(boilerplate_object, transitioned_kind);
+      }
     }
     FixedArray* object_array = FixedArray::cast(object->elements());
     object_array->set(store_index, *value);
=======================================
--- /branches/bleeding_edge/src/serialize.h     Tue Jun 10 10:51:33 2014 UTC
+++ /branches/bleeding_edge/src/serialize.h     Wed Jun 11 14:56:38 2014 UTC
@@ -34,7 +34,7 @@
 const int kReferenceIdMask = (1 << kReferenceIdBits) - 1;
 const int kReferenceTypeShift = kReferenceIdBits;

-const int kDeoptTableSerializeEntryCount = 12;
+const int kDeoptTableSerializeEntryCount = 64;

 // ExternalReferenceTable is a helper class that defines the relationship
 // between external references and their encodings. It is used to build
=======================================
--- /branches/bleeding_edge/src/x64/code-stubs-x64.cc Wed Jun 11 06:59:25 2014 UTC
+++ /branches/bleeding_edge/src/x64/code-stubs-x64.cc Wed Jun 11 14:56:38 2014 UTC
@@ -117,6 +117,16 @@
   descriptor->deoptimization_handler_ =
       Runtime::FunctionForId(Runtime::kHiddenRegExpConstructResult)->entry;
 }
+
+
+void KeyedLoadGenericElementStub::InitializeInterfaceDescriptor(
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { rdx, rax };
+  descriptor->register_param_count_ = 2;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      Runtime::FunctionForId(Runtime::kKeyedGetProperty)->entry;
+}


 void LoadFieldStub::InitializeInterfaceDescriptor(
