Title: [138398] trunk/Source/WTF
Revision: 138398
Author: [email protected]
Date: 2012-12-21 14:22:24 -0800 (Fri, 21 Dec 2012)

Log Message

Further harden FastMalloc
https://bugs.webkit.org/show_bug.cgi?id=105656

Reviewed by Gavin Barraclough.

This increases the degree to which we harden the FastMalloc
linked lists.  We now also mask the previous and next pointers
in the doubly linked list implementation.  I've also made
the masking itself somewhat more complex without a measurable
cost.  We still use ASLR to provide some general entropy, but
we blind the pointers against each node's 'this' pointer.
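
For illustration, a minimal self-contained sketch of the blinding scheme
(the constants and the shape of XOR_MASK_PTR_WITH_KEY mirror the
FastMalloc.cpp diff below; main(), the local variables, and the use of
standard decltype in place of the typeof extension are stand-ins of my own):

#include <assert.h>
#include <stdint.h>

// Stand-in for the ASLR-derived secret: in FastMalloc the entropy comes from
// the randomized address of this static, which differs from run to run.
static const char kLLHardeningMask = 0;

enum {
    MaskAddrShift = 8,
    MaskKeyShift = 4
};

// Rotate right so that the high (randomized) bits of an address also perturb
// the low bits of the masked pointer.
#define ROTATE_VALUE(value, amount) \
    (((value) >> (amount)) | ((value) << (sizeof(value) * 8 - (amount))))

// Blind ptr against both the per-node key (the node's own address) and the
// ASLR-derived secret. XOR is an involution, so the same macro masks a
// pointer before it is stored and unmasks it after it is loaded.
#define XOR_MASK_PTR_WITH_KEY(ptr, key) \
    (reinterpret_cast<decltype(ptr)>( \
        reinterpret_cast<uintptr_t>(ptr) \
        ^ ROTATE_VALUE(reinterpret_cast<uintptr_t>(key), MaskKeyShift) \
        ^ ROTATE_VALUE(reinterpret_cast<uintptr_t>(&kLLHardeningMask), MaskAddrShift)))

int main()
{
    int node = 0;    // pretend this is a freelist node; its address is the key
    int target = 0;  // pretend this is the next pointer we want to hide

    int* masked = XOR_MASK_PTR_WITH_KEY(&target, &node);
    // Masking twice with the same key restores the original pointer.
    assert(XOR_MASK_PTR_WITH_KEY(masked, &node) == &target);
    return 0;
}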

* wtf/FastMalloc.cpp:
(WTF::ClassIndex):
(WTF::SLL_Next):
(WTF::SLL_SetNext):
(WTF::Span::next):
(WTF::Span::prev):
(WTF::Span::setNext):
(WTF::Span::setPrev):
(Span):
  As Span now has to do masking on the next and previous pointers,
  I've updated the code to use accessors instead (see the sketch
  after this function list).
(WTF::DLL_Init):
(WTF::DLL_Remove):
(WTF::DLL_IsEmpty):
(WTF::DLL_Length):
(WTF::DLL_Prepend):
(WTF::TCMalloc_Central_FreeList::enumerateFreeObjects):
(WTF::TCMalloc_PageHeap::scavenge):
(WTF::TCMalloc_PageHeap::New):
(WTF::TCMalloc_PageHeap::AllocLarge):
(WTF::TCMalloc_PageHeap::ReturnedBytes):
(WTF::TCMalloc_PageHeap::Check):
(WTF::TCMalloc_PageHeap::CheckList):
(WTF::TCMalloc_PageHeap::ReleaseFreeList):
(WTF::TCMalloc_Central_FreeList::FetchFromSpans):
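
A similarly reduced sketch of the accessor pattern mentioned for Span above;
Node is a hypothetical stand-in for Span, and the point is only that every
load and store of the link pointer passes through the mask, so an unmasked
pointer never sits in memory:

#include <stdint.h>

static const char kLLHardeningMask = 0;

static inline uintptr_t rotateRight(uintptr_t value, unsigned amount)
{
    return (value >> amount) | (value << (sizeof(value) * 8 - amount));
}

struct Node {
    Node* next() const { return mask(m_next); }        // unmask on load
    void setNext(Node* next) { m_next = mask(next); }  // mask on store

private:
    // Same XOR blinding as the macro: keyed on this node's own address plus
    // the ASLR-derived secret, and self-inverse, so one helper does both jobs.
    Node* mask(Node* p) const
    {
        uintptr_t bits = reinterpret_cast<uintptr_t>(p)
            ^ rotateRight(reinterpret_cast<uintptr_t>(this), 4)
            ^ rotateRight(reinterpret_cast<uintptr_t>(&kLLHardeningMask), 8);
        return reinterpret_cast<Node*>(bits);
    }

    Node* m_next;  // only ever holds a masked value
};

DLL_Init, DLL_Remove, DLL_Prepend and friends then operate solely through
these accessors, which is what most of the FastMalloc.cpp hunks below change
mechanically.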

Modified Paths

trunk/Source/WTF/ChangeLog
trunk/Source/WTF/wtf/FastMalloc.cpp

Diff

Modified: trunk/Source/WTF/ChangeLog (138397 => 138398)


--- trunk/Source/WTF/ChangeLog	2012-12-21 22:17:43 UTC (rev 138397)
+++ trunk/Source/WTF/ChangeLog	2012-12-21 22:22:24 UTC (rev 138398)
@@ -1,3 +1,43 @@
+2012-12-21  Oliver Hunt  <[email protected]>
+
+        Further harden FastMalloc
+        https://bugs.webkit.org/show_bug.cgi?id=105656
+
+        Reviewed by Gavin Barraclough.
+
+        This increases the degree to which we harden the FastMalloc
+        linked lists.  We now also mask the previous and next pointers
+        in the doubly linked list implementation.  I've also made
+        the masking itself somewhat more complex without a measurable
+        cost.  We still use ASLR to provide some general entropy, but
+        we blind the pointers against each node's 'this' pointer.
+
+        * wtf/FastMalloc.cpp:
+        (WTF::ClassIndex):
+        (WTF::SLL_Next):
+        (WTF::SLL_SetNext):
+        (WTF::Span::next):
+        (WTF::Span::prev):
+        (WTF::Span::setNext):
+        (WTF::Span::setPrev):
+        (Span):
+          As Span now has to do masking on the next and previous pointers,
+          I've updated the code to use accessors instead.
+        (WTF::DLL_Init):
+        (WTF::DLL_Remove):
+        (WTF::DLL_IsEmpty):
+        (WTF::DLL_Length):
+        (WTF::DLL_Prepend):
+        (WTF::TCMalloc_Central_FreeList::enumerateFreeObjects):
+        (WTF::TCMalloc_PageHeap::scavenge):
+        (WTF::TCMalloc_PageHeap::New):
+        (WTF::TCMalloc_PageHeap::AllocLarge):
+        (WTF::TCMalloc_PageHeap::ReturnedBytes):
+        (WTF::TCMalloc_PageHeap::Check):
+        (WTF::TCMalloc_PageHeap::CheckList):
+        (WTF::TCMalloc_PageHeap::ReleaseFreeList):
+        (WTF::TCMalloc_Central_FreeList::FetchFromSpans):
+
 2012-12-20  Ryuan Choi  <[email protected]>
 
         [EFL] Build break with latest EFL libraries.

Modified: trunk/Source/WTF/wtf/FastMalloc.cpp (138397 => 138398)


--- trunk/Source/WTF/wtf/FastMalloc.cpp	2012-12-21 22:17:43 UTC (rev 138397)
+++ trunk/Source/WTF/wtf/FastMalloc.cpp	2012-12-21 22:22:24 UTC (rev 138398)
@@ -510,11 +510,14 @@
  * freelist manipulation much more difficult.
  */
 static const char kLLHardeningMask = 0;
-#define MASK_PTR(ptr) (reinterpret_cast<typeof(ptr)>(reinterpret_cast<uintptr_t>(ptr)^reinterpret_cast<uintptr_t>(&kLLHardeningMask)))
-#define UNMASK_PTR(ptr) (reinterpret_cast<typeof(ptr)>(reinterpret_cast<uintptr_t>(ptr)^reinterpret_cast<uintptr_t>(&kLLHardeningMask)))
+enum {
+    MaskAddrShift = 8,
+    MaskKeyShift = 4
+};
+#define ROTATE_VALUE(value, amount) (((value) >> (amount)) | ((value) << (sizeof(value) * 8 - (amount))))
+#define XOR_MASK_PTR_WITH_KEY(ptr, key) (reinterpret_cast<typeof(ptr)>(reinterpret_cast<uintptr_t>(ptr)^ROTATE_VALUE(reinterpret_cast<uintptr_t>(key), MaskKeyShift)^ROTATE_VALUE(reinterpret_cast<uintptr_t>(&kLLHardeningMask), MaskAddrShift)))
 #else
-#define MASK_PTR(ptr) (ptr)
-#define UNMASK_PTR(ptr) (ptr)
+#define XOR_MASK_PTR_WITH_KEY(ptr, key) (ptr)
 #endif
 
 
@@ -684,11 +687,11 @@
 // storage.
 
 static inline void *SLL_Next(void *t) {
-  return UNMASK_PTR(*(reinterpret_cast<void**>(t)));
+  return XOR_MASK_PTR_WITH_KEY(*(reinterpret_cast<void**>(t)), t);
 }
 
 static inline void SLL_SetNext(void *t, void *n) {
-  *(reinterpret_cast<void**>(t)) = MASK_PTR(n);
+  *(reinterpret_cast<void**>(t)) = XOR_MASK_PTR_WITH_KEY(n, t);
 }
 
 static inline void SLL_Push(void **list, void *element) {
@@ -1028,8 +1031,15 @@
 struct Span {
   PageID        start;          // Starting page number
   Length        length;         // Number of pages in span
-  Span*         next;           // Used when in link list
-  Span*         prev;           // Used when in link list
+  Span* next() const { return XOR_MASK_PTR_WITH_KEY(m_next, this); }
+  Span* prev() const { return XOR_MASK_PTR_WITH_KEY(m_prev, this); }
+  void setNext(Span* next) { m_next = XOR_MASK_PTR_WITH_KEY(next, this); }
+  void setPrev(Span* prev) { m_prev = XOR_MASK_PTR_WITH_KEY(prev, this); }
+
+private:
+  Span*         m_next;           // Used when in link list
+  Span*         m_prev;           // Used when in link list
+public:
   void*         objects;        // Linked list of free objects
   unsigned int  free : 1;       // Is the span free
 #ifndef NO_TCMALLOC_SAMPLES
@@ -1087,24 +1097,24 @@
 // -------------------------------------------------------------------------
 
 static inline void DLL_Init(Span* list) {
-  list->next = list;
-  list->prev = list;
+  list->setNext(list);
+  list->setPrev(list);
 }
 
 static inline void DLL_Remove(Span* span) {
-  span->prev->next = span->next;
-  span->next->prev = span->prev;
-  span->prev = NULL;
-  span->next = NULL;
+  span->prev()->setNext(span->next());
+  span->next()->setPrev(span->prev());
+  span->setPrev(NULL);
+  span->setNext(NULL);
 }
 
 static ALWAYS_INLINE bool DLL_IsEmpty(const Span* list) {
-  return list->next == list;
+  return list->next() == list;
 }
 
 static int DLL_Length(const Span* list) {
   int result = 0;
-  for (Span* s = list->next; s != list; s = s->next) {
+  for (Span* s = list->next(); s != list; s = s->next()) {
     result++;
   }
   return result;
@@ -1121,12 +1131,12 @@
 #endif
 
 static inline void DLL_Prepend(Span* list, Span* span) {
-  ASSERT(span->next == NULL);
-  ASSERT(span->prev == NULL);
-  span->next = list->next;
-  span->prev = list;
-  list->next->prev = span;
-  list->next = span;
+  ASSERT(span->next() == NULL);
+  ASSERT(span->prev() == NULL);
+  span->setNext(list->next());
+  span->setPrev(list);
+  list->next()->setPrev(span);
+  list->setNext(span);
 }
 
 //-------------------------------------------------------------------
@@ -1162,16 +1172,16 @@
   template <class Finder, class Reader>
   void enumerateFreeObjects(Finder& finder, const Reader& reader, TCMalloc_Central_FreeList* remoteCentralFreeList)
   {
-    for (Span* span = &empty_; span && span != &empty_; span = (span->next ? reader(span->next) : 0))
+    for (Span* span = &empty_; span && span != &empty_; span = (span->next() ? reader(span->next()) : 0))
       ASSERT(!span->objects);
 
     ASSERT(!nonempty_.objects);
     static const ptrdiff_t nonemptyOffset = reinterpret_cast<const char*>(&nonempty_) - reinterpret_cast<const char*>(this);
 
     Span* remoteNonempty = reinterpret_cast<Span*>(reinterpret_cast<char*>(remoteCentralFreeList) + nonemptyOffset);
-    Span* remoteSpan = nonempty_.next;
+    Span* remoteSpan = nonempty_.next();
 
-    for (Span* span = reader(remoteSpan); span && remoteSpan != remoteNonempty; remoteSpan = span->next, span = (span->next ? reader(span->next) : 0)) {
+    for (Span* span = reader(remoteSpan); span && remoteSpan != remoteNonempty; remoteSpan = span->next(), span = (span->next() ? reader(span->next()) : 0)) {
       for (void* nextObject = span->objects; nextObject; nextObject = reader.nextEntryInLinkedList(reinterpret_cast<void**>(nextObject)))
         finder.visit(nextObject);
     }
@@ -1821,7 +1831,7 @@
             size_t length = DLL_Length(&slist->normal);
             size_t numSpansToReturn = (i > kMinSpanListsWithSpans) ? length : length / 2;
             for (int j = 0; static_cast<size_t>(j) < numSpansToReturn && !DLL_IsEmpty(&slist->normal) && free_committed_pages_ > targetPageCount; j++) {
-                Span* s = slist->normal.prev; 
+                Span* s = slist->normal.prev();
                 DLL_Remove(s);
                 ASSERT(!s->decommitted);
                 if (!s->decommitted) {
@@ -1870,7 +1880,7 @@
       continue;
     }
 
-    Span* result = ll->next;
+    Span* result = ll->next();
     Carve(result, n, released);
 #if USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
     // The newly allocated memory is from a span that's in the normal span list (already committed).  Update the
@@ -1907,9 +1917,9 @@
   Span *best = NULL;
 
   // Search through normal list
-  for (Span* span = large_.normal.next;
+  for (Span* span = large_.normal.next();
        span != &large_.normal;
-       span = span->next) {
+       span = span->next()) {
     if (span->length >= n) {
       if ((best == NULL)
           || (span->length < best->length)
@@ -1921,9 +1931,9 @@
   }
 
   // Search through released list in case it has a better fit
-  for (Span* span = large_.returned.next;
+  for (Span* span = large_.returned.next();
        span != &large_.returned;
-       span = span->next) {
+       span = span->next()) {
     if (span->length >= n) {
       if ((best == NULL)
           || (span->length < best->length)
@@ -2175,7 +2185,7 @@
         result += r_pages << kPageShift;
     }
     
-    for (Span* s = large_.returned.next; s != &large_.returned; s = s->next)
+    for (Span* s = large_.returned.next(); s != &large_.returned; s = s->next())
         result += s->length << kPageShift;
     return result;
 }
@@ -2302,8 +2312,8 @@
 #if USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
   size_t totalFreeCommitted = 0;
 #endif
-  ASSERT(free_[0].normal.next == &free_[0].normal);
-  ASSERT(free_[0].returned.next == &free_[0].returned);
+  ASSERT(free_[0].normal.next() == &free_[0].normal);
+  ASSERT(free_[0].returned.next() == &free_[0].returned);
 #if USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
   totalFreeCommitted = CheckList(&large_.normal, kMaxPages, 1000000000, false);
 #else
@@ -2331,7 +2341,7 @@
 #else
 size_t TCMalloc_PageHeap::CheckList(Span* list, Length min_pages, Length max_pages, bool decommitted) {
   size_t freeCount = 0;
-  for (Span* s = list->next; s != list; s = s->next) {
+  for (Span* s = list->next(); s != list; s = s->next()) {
     CHECK_CONDITION(s->free);
     CHECK_CONDITION(s->length >= min_pages);
     CHECK_CONDITION(s->length <= max_pages);
@@ -2352,7 +2362,7 @@
 #endif
 
   while (!DLL_IsEmpty(list)) {
-    Span* s = list->prev;
+    Span* s = list->prev();
 
     DLL_Remove(s);
     s->decommitted = true;
@@ -2876,7 +2886,7 @@
 
 void* TCMalloc_Central_FreeList::FetchFromSpans() {
   if (DLL_IsEmpty(&nonempty_)) return NULL;
-  Span* span = nonempty_.next;
+  Span* span = nonempty_.next();
 
   ASSERT(span->objects != NULL);
   ASSERT_SPAN_COMMITTED(span);