Revision: 5167
Author: [email protected]
Date: Mon Aug  2 08:08:17 2010
Log: Revert r5147 due to failing assert, with no simple solution. Issue 808.
Review URL: http://codereview.chromium.org/3087001
http://code.google.com/p/v8/source/detail?r=5167

Modified:
 /branches/bleeding_edge/src/bootstrapper.cc
 /branches/bleeding_edge/src/contexts.h
 /branches/bleeding_edge/src/heap.cc
 /branches/bleeding_edge/src/heap.h
 /branches/bleeding_edge/src/objects-debug.cc
 /branches/bleeding_edge/src/objects-inl.h
 /branches/bleeding_edge/src/objects.cc
 /branches/bleeding_edge/src/objects.h
 /branches/bleeding_edge/src/v8-counters.h

=======================================
--- /branches/bleeding_edge/src/bootstrapper.cc Wed Jul 28 08:08:32 2010
+++ /branches/bleeding_edge/src/bootstrapper.cc Mon Aug  2 08:08:17 2010
@@ -231,7 +231,6 @@
   bool InstallNatives();
   void InstallCustomCallGenerators();
   void InstallJSFunctionResultCaches();
-  void InitializeNormalizedMapCaches();
   // Used both for deserialized and from-scratch contexts to add the extensions
   // provided.
   static bool InstallExtensions(Handle<Context> global_context,
@@ -1391,13 +1390,6 @@

   global_context()->set_jsfunction_result_caches(*caches);
 }
-
-
-void Genesis::InitializeNormalizedMapCaches() {
-  Handle<FixedArray> array(
-      Factory::NewFixedArray(NormalizedMapCache::kEntries, TENURED));
-  global_context()->set_normalized_map_cache(NormalizedMapCache::cast(*array));
-}


 int BootstrapperActive::nesting_ = 0;
@@ -1766,7 +1758,6 @@
     HookUpGlobalProxy(inner_global, global_proxy);
     InitializeGlobal(inner_global, empty_function);
     InstallJSFunctionResultCaches();
-    InitializeNormalizedMapCaches();
     if (!InstallNatives()) return;

     MakeFunctionInstancePrototypeWritable();
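
For reference, the bootstrap step removed above only allocated the cache's
backing store (a TENURED FixedArray of NormalizedMapCache::kEntries slots,
i.e. 64) and stored it in the global context through the generated setter.
A minimal standalone sketch of that wiring, using stand-in types rather than
V8's handles and heap objects:

#include <vector>

// Stand-in for the cache's backing store: kEntries slots, all initially
// "undefined" (modelled here as null pointers).
struct NormalizedMapCacheStandIn {
  static constexpr int kEntries = 64;  // matches the reverted kEntries
  std::vector<const void*> slots;
  NormalizedMapCacheStandIn() : slots(kEntries, nullptr) {}
};

// Stand-in for the global context object that owns one such cache.
struct GlobalContextStandIn {
  NormalizedMapCacheStandIn* normalized_map_cache = nullptr;
};

// Rough equivalent of the reverted Genesis::InitializeNormalizedMapCaches().
void InitializeNormalizedMapCache(GlobalContextStandIn* context) {
  context->normalized_map_cache = new NormalizedMapCacheStandIn();
}
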
=======================================
--- /branches/bleeding_edge/src/contexts.h      Wed Jul 28 08:08:32 2010
+++ /branches/bleeding_edge/src/contexts.h      Mon Aug  2 08:08:17 2010
@@ -85,7 +85,6 @@
   V(CONFIGURE_GLOBAL_INDEX, JSFunction, configure_global_fun) \
   V(FUNCTION_CACHE_INDEX, JSObject, function_cache) \
   V(JSFUNCTION_RESULT_CACHES_INDEX, FixedArray, jsfunction_result_caches) \
-  V(NORMALIZED_MAP_CACHE_INDEX, NormalizedMapCache, normalized_map_cache) \
   V(RUNTIME_CONTEXT_INDEX, Context, runtime_context) \
   V(CALL_AS_FUNCTION_DELEGATE_INDEX, JSFunction, call_as_function_delegate) \
   V(CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, JSFunction, \
@@ -210,7 +209,6 @@
     CONFIGURE_GLOBAL_INDEX,
     FUNCTION_CACHE_INDEX,
     JSFUNCTION_RESULT_CACHES_INDEX,
-    NORMALIZED_MAP_CACHE_INDEX,
     RUNTIME_CONTEXT_INDEX,
     CALL_AS_FUNCTION_DELEGATE_INDEX,
     CALL_AS_CONSTRUCTOR_DELEGATE_INDEX,
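
Two parallel edits are needed in this file because the slot indices are an
explicit enum while the typed accessors (normalized_map_cache(),
set_normalized_map_cache()) used elsewhere in this change are generated from
the V(...) list, and the two have to stay in sync. A simplified sketch of
that pattern (illustrative only; the real contexts.h macros also carry the
slot's type and cast on access):

// Hand-maintained slot indices, kept in sync with the list below -- which is
// why the diff has to touch both the enum and the V(...) list.
enum ContextSlotStandIn {
  JSFUNCTION_RESULT_CACHES_INDEX,
  RUNTIME_CONTEXT_INDEX,
  kContextSlotCount
};

// The list names every slot once...
#define CONTEXT_FIELDS_STANDIN(V)                               \
  V(JSFUNCTION_RESULT_CACHES_INDEX, jsfunction_result_caches)   \
  V(RUNTIME_CONTEXT_INDEX, runtime_context)

// ...and expands into typed accessors, so dropping one V(...) line drops the
// name()/set_name() pair along with it.
struct ContextStandIn {
  void* slots[kContextSlotCount];
#define DECLARE_ACCESSORS(index, name)                     \
  void* name() const { return slots[index]; }              \
  void set_##name(void* value) { slots[index] = value; }
  CONTEXT_FIELDS_STANDIN(DECLARE_ACCESSORS)
#undef DECLARE_ACCESSORS
};
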
=======================================
--- /branches/bleeding_edge/src/heap.cc Wed Jul 28 08:08:32 2010
+++ /branches/bleeding_edge/src/heap.cc Mon Aug  2 08:08:17 2010
@@ -568,22 +568,6 @@
   ClearThreadJSFunctionResultCachesVisitor visitor;
   ThreadManager::IterateArchivedThreads(&visitor);
 }
-
-
-class ClearThreadNormalizedMapCachesVisitor: public ThreadVisitor {
-  virtual void VisitThread(ThreadLocalTop* top) {
-    Context* context = top->context_;
-    if (context == NULL) return;
-    context->global()->global_context()->normalized_map_cache()->Clear();
-  }
-};
-
-
-void Heap::ClearNormalizedMapCaches() {
-  if (Bootstrapper::IsActive()) return;
-  ClearThreadNormalizedMapCachesVisitor visitor;
-  ThreadManager::IterateArchivedThreads(&visitor);
-}


 #ifdef DEBUG
@@ -776,8 +760,6 @@
   CompletelyClearInstanceofCache();

   if (is_compacting) FlushNumberStringCache();
-
-  ClearNormalizedMapCaches();
 }


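The removed Heap::ClearNormalizedMapCaches() mirrored the
ClearJSFunctionResultCaches() code kept above it: skip everything while the
bootstrapper is active, otherwise walk the archived threads with a
ThreadVisitor and clear the cache reachable from each thread's context. A
stripped-down model of that visitor pattern (stand-in types, not V8's
ThreadManager):

#include <vector>

struct CacheStandIn {
  void Clear() { /* drop every entry, as NormalizedMapCache::Clear() did */ }
};

struct ThreadStateStandIn {
  CacheStandIn* context_cache = nullptr;  // null when the thread has no context
};

// Visitor interface: one callback per archived thread.
struct ThreadVisitorStandIn {
  virtual ~ThreadVisitorStandIn() = default;
  virtual void VisitThread(ThreadStateStandIn* state) = 0;
};

struct ClearCachesVisitor : ThreadVisitorStandIn {
  void VisitThread(ThreadStateStandIn* state) override {
    if (state->context_cache == nullptr) return;  // mirrors the NULL-context check
    state->context_cache->Clear();
  }
};

void ClearAllThreadCaches(const std::vector<ThreadStateStandIn*>& archived_threads) {
  ClearCachesVisitor visitor;
  for (ThreadStateStandIn* state : archived_threads) visitor.VisitThread(state);
}
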
=======================================
--- /branches/bleeding_edge/src/heap.h  Wed Jul 28 08:08:32 2010
+++ /branches/bleeding_edge/src/heap.h  Mon Aug  2 08:08:17 2010
@@ -1022,8 +1022,6 @@

   static void ClearJSFunctionResultCaches();

-  static void ClearNormalizedMapCaches();
-
   static GCTracer* tracer() { return tracer_; }

  private:
=======================================
--- /branches/bleeding_edge/src/objects-debug.cc        Wed Jul 28 08:08:32 2010
+++ /branches/bleeding_edge/src/objects-debug.cc        Mon Aug  2 08:08:17 2010
@@ -644,16 +644,6 @@
   VerifyHeapPointer(prototype());
   VerifyHeapPointer(instance_descriptors());
 }
-
-
-void Map::NormalizedMapVerify() {
-  MapVerify();
-  ASSERT(instance_descriptors() == Heap::empty_descriptor_array());
-  ASSERT(code_cache() == Heap::empty_fixed_array());
-  ASSERT(pre_allocated_property_fields() == 0);
-  ASSERT(unused_property_fields() == 0);
-  ASSERT(scavenger() == Heap::GetScavenger(instance_type(), instance_size()));
-}


 void CodeCache::CodeCachePrint() {
@@ -1369,21 +1359,6 @@
     }
   }
 }
-
-
-void NormalizedMapCache::NormalizedMapCacheVerify() {
-  FixedArray::cast(this)->Verify();
-  if (FLAG_enable_slow_asserts) {
-    for (int i = 0; i < length(); i++) {
-      Object* e = get(i);
-      if (e->IsMap()) {
-        Map::cast(e)->NormalizedMapVerify();
-      } else {
-        ASSERT(e->IsUndefined());
-      }
-    }
-  }
-}


 #endif  // DEBUG
=======================================
--- /branches/bleeding_edge/src/objects-inl.h   Wed Jul 28 08:08:32 2010
+++ /branches/bleeding_edge/src/objects-inl.h   Mon Aug  2 08:08:17 2010
@@ -572,18 +572,6 @@
 #endif
   return true;
 }
-
-
-bool Object::IsNormalizedMapCache() {
-  if (!IsFixedArray()) return false;
-  if (FixedArray::cast(this)->length() != NormalizedMapCache::kEntries) {
-    return false;
-  }
-#ifdef DEBUG
-  reinterpret_cast<NormalizedMapCache*>(this)->NormalizedMapCacheVerify();
-#endif
-  return true;
-}


 bool Object::IsCompilationCacheTable() {
@@ -1639,7 +1627,6 @@
 CAST_ACCESSOR(DescriptorArray)
 CAST_ACCESSOR(SymbolTable)
 CAST_ACCESSOR(JSFunctionResultCache)
-CAST_ACCESSOR(NormalizedMapCache)
 CAST_ACCESSOR(CompilationCacheTable)
 CAST_ACCESSOR(CodeCacheHashTable)
 CAST_ACCESSOR(MapCache)
=======================================
--- /branches/bleeding_edge/src/objects.cc      Wed Jul 28 08:08:32 2010
+++ /branches/bleeding_edge/src/objects.cc      Mon Aug  2 08:08:17 2010
@@ -2112,81 +2112,6 @@
   LocalLookup(name, &result);
   return GetPropertyAttribute(this, &result, name, false);
 }
-
-
-Object* NormalizedMapCache::Get(Map* fast, PropertyNormalizationMode mode) {
-  int index = Hash(fast) % kEntries;
-  Object* obj = get(index);
-
-  if (obj->IsMap() && CheckHit(Map::cast(obj), fast, mode)) {
-#ifdef DEBUG
-    if (FLAG_enable_slow_asserts) {
-      // The cached map should match freshly created normalized map bit-by-bit.
-      Object* fresh = fast->CopyNormalized(mode);
-      if (!fresh->IsFailure()) {
-        // Copy the unused byte so that the assertion below works.
-        Map::cast(fresh)->address()[Map::kUnusedOffset] =
-            Map::cast(obj)->address()[Map::kUnusedOffset];
-        ASSERT(memcmp(Map::cast(fresh)->address(),
-                      Map::cast(obj)->address(),
-                      Map::kSize) == 0);
-      }
-    }
-#endif
-    return obj;
-  }
-
-  obj = fast->CopyNormalized(mode);
-  if (obj->IsFailure()) return obj;
-  set(index, obj);
-  Counters::normalized_maps.Increment();
-
-  return obj;
-}
-
-
-void NormalizedMapCache::Clear() {
-  int entries = length();
-  for (int i = 0; i != entries; i++) {
-    set_undefined(i);
-  }
-}
-
-
-int NormalizedMapCache::Hash(Map* fast) {
-  // For performance reasons we only hash the 3 most variable fields of a map:
-  // constructor, prototype and bit_field2.
-
-  // Shift away the tag.
-  int hash = (static_cast<uint32_t>(
-        reinterpret_cast<uintptr_t>(fast->constructor())) >> 2);
-
-  // XOR-ing the prototype and constructor directly yields too many zero bits
-  // when the two pointers are close (which is fairly common).
-  // To avoid this we shift the prototype 4 bits relatively to the constructor.
-  hash ^= (static_cast<uint32_t>(
-        reinterpret_cast<uintptr_t>(fast->prototype())) << 2);
-
-  return hash ^ (hash >> 16) ^ fast->bit_field2();
-}
-
-
-bool NormalizedMapCache::CheckHit(Map* slow,
-                                  Map* fast,
-                                  PropertyNormalizationMode mode) {
-#ifdef DEBUG
-  slow->NormalizedMapVerify();
-#endif
-  return
-    slow->constructor() == fast->constructor() &&
-    slow->prototype() == fast->prototype() &&
-    slow->inobject_properties() == ((mode == CLEAR_INOBJECT_PROPERTIES) ?
-                                    0 :
-                                    fast->inobject_properties()) &&
-    slow->instance_type() == fast->instance_type() &&
-    slow->bit_field() == fast->bit_field() &&
-    slow->bit_field2() == fast->bit_field2();
-}


 Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
@@ -2253,22 +2178,29 @@
   int index = map()->instance_descriptors()->NextEnumerationIndex();
   dictionary->SetNextEnumerationIndex(index);

-  obj = Top::context()->global_context()->
-      normalized_map_cache()->Get(map(), mode);
+  // Allocate new map.
+  obj = map()->CopyDropDescriptors();
   if (obj->IsFailure()) return obj;
   Map* new_map = Map::cast(obj);

+  // Clear inobject properties if needed by adjusting the instance size and
+  // putting in a filler object instead of the inobject properties.
+  if (mode == CLEAR_INOBJECT_PROPERTIES && map()->inobject_properties() > 0) {
+    int instance_size_delta = map()->inobject_properties() * kPointerSize;
+    int new_instance_size = map()->instance_size() - instance_size_delta;
+    new_map->set_inobject_properties(0);
+    new_map->set_instance_size(new_instance_size);
+    new_map->set_scavenger(Heap::GetScavenger(new_map->instance_type(),
+                                              new_map->instance_size()));
+    Heap::CreateFillerObjectAt(this->address() + new_instance_size,
+                               instance_size_delta);
+  }
+  new_map->set_unused_property_fields(0);
+
   // We have now successfully allocated all the necessary objects.
   // Changes can now be made with the guarantee that all of them take effect.
-
-  // Resize the object in the heap if necessary.
-  int new_instance_size = new_map->instance_size();
-  int instance_size_delta = map()->instance_size() - new_instance_size;
-  ASSERT(instance_size_delta >= 0);
-  Heap::CreateFillerObjectAt(this->address() + new_instance_size,
-                             instance_size_delta);
-
   set_map(new_map);
+  map()->set_instance_descriptors(Heap::empty_descriptor_array());

   set_properties(dictionary);

@@ -3162,33 +3094,6 @@
   Map::cast(result)->ClearCodeCache();
   return result;
 }
-
-
-Object* Map::CopyNormalized(PropertyNormalizationMode mode) {
-  int new_instance_size = instance_size();
-  if (mode == CLEAR_INOBJECT_PROPERTIES) {
-    new_instance_size -= inobject_properties() * kPointerSize;
-  }
-
-  Object* result = Heap::AllocateMap(instance_type(), new_instance_size);
-  if (result->IsFailure()) return result;
-
-  if (mode != CLEAR_INOBJECT_PROPERTIES) {
-    Map::cast(result)->set_inobject_properties(inobject_properties());
-  }
-
-  Map::cast(result)->set_prototype(prototype());
-  Map::cast(result)->set_constructor(constructor());
-
-  Map::cast(result)->set_bit_field(bit_field());
-  Map::cast(result)->set_bit_field2(bit_field2());
-
-#ifdef DEBUG
-  Map::cast(result)->NormalizedMapVerify();
-#endif
-
-  return result;
-}


 Object* Map::CopyDropTransitions() {
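
With the cache gone, NormalizeProperties() goes back to copying the map
directly via CopyDropDescriptors() and, when in-object properties are
cleared, shrinking the object in place: the new map gets a smaller instance
size and Heap::CreateFillerObjectAt() writes a filler over the freed slots so
the heap stays walkable. A minimal standalone model of that size bookkeeping,
with stand-in types rather than V8's heap objects (the removed cache lookup
itself is sketched after the objects.h hunk below):

#include <cassert>
#include <cstddef>
#include <vector>

constexpr std::size_t kPointerSizeStandIn = sizeof(void*);

// One record per heap object; a linear sweep of the page just walks this list.
enum KindStandIn { kJSObject, kFiller };
struct HeapObjectStandIn {
  KindStandIn kind;
  std::size_t size;  // bytes the object occupies on its page
};

// Shrink the object at `index` by inobject_properties * kPointerSize bytes and
// insert a filler of exactly that size behind it, so the page layout (the sum
// of the sizes) is unchanged -- the effect the restored code gets from
// set_instance_size() plus Heap::CreateFillerObjectAt().
void ClearInobjectProperties(std::vector<HeapObjectStandIn>& page,
                             std::size_t index, int inobject_properties) {
  if (inobject_properties <= 0) return;  // nothing to clear, no filler needed
  const std::size_t delta = inobject_properties * kPointerSizeStandIn;
  assert(page[index].size >= delta);
  page[index].size -= delta;
  page.insert(page.begin() + static_cast<std::ptrdiff_t>(index) + 1,
              HeapObjectStandIn{kFiller, delta});
}
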
=======================================
--- /branches/bleeding_edge/src/objects.h       Wed Jul 28 08:08:32 2010
+++ /branches/bleeding_edge/src/objects.h       Mon Aug  2 08:08:17 2010
@@ -631,7 +631,6 @@
   inline bool IsDictionary();
   inline bool IsSymbolTable();
   inline bool IsJSFunctionResultCache();
-  inline bool IsNormalizedMapCache();
   inline bool IsCompilationCacheTable();
   inline bool IsCodeCacheHashTable();
   inline bool IsMapCache();
@@ -2388,31 +2387,6 @@
 };


-// The cache for maps used by normalized (dictionary mode) objects.
-// Such maps do not have property descriptors, so a typical program
-// needs very limited number of distinct normalized maps.
-class NormalizedMapCache: public FixedArray {
- public:
-  static const int kEntries = 64;
-
-  Object* Get(Map* fast, PropertyNormalizationMode mode);
-
-  void Clear();
-
-  // Casting
-  static inline NormalizedMapCache* cast(Object* obj);
-
-#ifdef DEBUG
-  void NormalizedMapCacheVerify();
-#endif
-
- private:
-  static int Hash(Map* fast);
-
-  static bool CheckHit(Map* slow, Map* fast, PropertyNormalizationMode mode);
-};
-
-
 // ByteArray represents fixed sized byte arrays. Used by the outside world,
 // such as PCRE, and also by the memory allocator and garbage collector to
 // fill in free blocks in the heap.
@@ -3056,8 +3030,6 @@

   Object* CopyDropDescriptors();

-  Object* CopyNormalized(PropertyNormalizationMode mode);
-
   // Returns a copy of the map, with all transitions dropped from the
   // instance descriptors.
   Object* CopyDropTransitions();
@@ -3121,7 +3093,6 @@
 #ifdef DEBUG
   void MapPrint();
   void MapVerify();
-  void NormalizedMapVerify();
 #endif

   inline Scavenger scavenger();
@@ -3160,8 +3131,6 @@
   static const int kPreAllocatedPropertyFieldsOffset =
       kInstanceSizesOffset + kPreAllocatedPropertyFieldsByte;
   // The byte at position 3 is not in use at the moment.
-  static const int kUnusedByte = 3;
-  static const int kUnusedOffset = kInstanceSizesOffset + kUnusedByte;

   // Byte offsets within kInstanceAttributesOffset attributes.
   static const int kInstanceTypeOffset = kInstanceAttributesOffset + 0;
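
The class deleted here (together with its Get/Hash/CheckHit/Clear bodies
removed from objects.cc above) was a fixed 64-entry, direct-mapped table:
hash the fast map's constructor, prototype and bit_field2, probe a single
slot, and either reuse the cached normalized map on a field-by-field hit or
overwrite the slot with a freshly normalized copy. The kUnusedByte /
kUnusedOffset constants dropped at the end appear to have existed only so the
debug-only memcmp in Get() could copy over the map's unused byte before
comparing. A rough standalone model of the cache, with simplified stand-in
types rather than V8's Map and FixedArray:

#include <array>
#include <cstdint>

enum PropertyNormalizationModeStandIn {
  KEEP_INOBJECT_PROPERTIES,
  CLEAR_INOBJECT_PROPERTIES
};

// Only the fields the reverted Hash()/CheckHit() looked at.
struct MapStandIn {
  const void* constructor;
  const void* prototype;
  int instance_type;
  int inobject_properties;
  std::uint8_t bit_field;
  std::uint8_t bit_field2;
};

class NormalizedMapCacheStandIn {
 public:
  static constexpr int kEntries = 64;

  // Return the cached normalized map if its key fields match, otherwise
  // normalize, overwrite the slot and return the fresh map (mirrors Get()).
  const MapStandIn* Get(const MapStandIn& fast,
                        PropertyNormalizationModeStandIn mode) {
    const int index = static_cast<int>(Hash(fast) % kEntries);
    const MapStandIn* cached = entries_[index];
    if (cached != nullptr && CheckHit(*cached, fast, mode)) return cached;
    entries_[index] = Normalize(fast, mode);  // a miss simply evicts the old entry
    return entries_[index];
  }

 private:
  // Same mixing idea as the reverted Hash(): combine the most variable fields,
  // shifting the two pointers relative to each other so that nearby addresses
  // do not cancel out under XOR.
  static std::uint32_t Hash(const MapStandIn& fast) {
    std::uint32_t hash = static_cast<std::uint32_t>(
        reinterpret_cast<std::uintptr_t>(fast.constructor) >> 2);
    hash ^= static_cast<std::uint32_t>(
        reinterpret_cast<std::uintptr_t>(fast.prototype) << 2);
    return hash ^ (hash >> 16) ^ fast.bit_field2;
  }

  static bool CheckHit(const MapStandIn& slow, const MapStandIn& fast,
                       PropertyNormalizationModeStandIn mode) {
    const int expected_inobject =
        mode == CLEAR_INOBJECT_PROPERTIES ? 0 : fast.inobject_properties;
    return slow.constructor == fast.constructor &&
           slow.prototype == fast.prototype &&
           slow.inobject_properties == expected_inobject &&
           slow.instance_type == fast.instance_type &&
           slow.bit_field == fast.bit_field &&
           slow.bit_field2 == fast.bit_field2;
  }

  static const MapStandIn* Normalize(const MapStandIn& fast,
                                     PropertyNormalizationModeStandIn mode) {
    MapStandIn* slow = new MapStandIn(fast);  // deliberately leaked in this sketch
    if (mode == CLEAR_INOBJECT_PROPERTIES) slow->inobject_properties = 0;
    return slow;
  }

  std::array<const MapStandIn*, kEntries> entries_{};
};
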
=======================================
--- /branches/bleeding_edge/src/v8-counters.h   Wed Jul 28 08:08:32 2010
+++ /branches/bleeding_edge/src/v8-counters.h   Mon Aug  2 08:08:17 2010
@@ -67,7 +67,6 @@
   SC(pcre_mallocs, V8.PcreMallocCount)                                \
   /* OS Memory allocated */                                           \
   SC(memory_allocated, V8.OsMemoryAllocated)                          \
-  SC(normalized_maps, V8.NormalizedMaps)                              \
   SC(props_to_dictionary, V8.ObjectPropertiesToDictionary)            \
   SC(elements_to_dictionary, V8.ObjectElementsToDictionary)           \
   SC(alive_after_last_gc, V8.AliveAfterLastGC)                        \

--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
