EricWF created this revision. EricWF added reviewers: mclow.lists, danalbert, compnerd, majnemer. EricWF added a subscriber: cfe-commits.
In 32 bit builds the __cxa_exception class has an alignment requirement greater than the maximal alignment supported by malloc. If this is the case we need to manually align the pointers returned from malloc in order to prevent undefined behavior. This patch does exactly that. See PR24604 for more information - https://llvm.org/bugs/show_bug.cgi?id=24604 http://reviews.llvm.org/D12512 Files: src/cxa_exception.cpp test/test_cxa_allocate_exception.pass.cpp
Index: test/test_cxa_allocate_exception.pass.cpp =================================================================== --- /dev/null +++ test/test_cxa_allocate_exception.pass.cpp @@ -0,0 +1,65 @@ +//===--------------- test_cxa_allocate_exception.pass.cpp -----------------===// +// +// The LLVM Compiler Infrastructure +// +// This file is dual licensed under the MIT and the University of Illinois Open +// Source Licenses. See LICENSE.TXT for details. +// +//===----------------------------------------------------------------------===// +// +// UNSUPPORTED: c++98, c++03 + +// void* __cxa_allocate_exception(size_t); +// void __cxa_free_exception(void*); +// void* __cxa_allocate_dependent_exception(); +// void __cxa_free_dependent_exception(); + +// __cxa_exception is specified with __attribute__((aligned)) for GNU unwind. +// This means that during 32 bit builds __cxa_exception is "over aligned". This +// test checks that __cxa_allocate_exception returns correctly overaligned memory. +// See PR24604 - https://llvm.org/bugs/show_bug.cgi?id=24604 + +#include "../src/cxa_exception.hpp" +#include <cstddef> // for std::max_align_t +#include <cassert> + +#if defined(__GNUC__) && !defined(_LP64) +#define SHOULD_BE_OVERALIGNED 1 +#endif + +using namespace __cxxabiv1; + +const std::size_t max_alignment = alignof(std::max_align_t); +const std::size_t required_alignment = alignof(__cxa_exception); +const bool requires_over_alignment = max_alignment < required_alignment; + +static_assert(alignof(__cxa_exception) == alignof(__cxa_dependent_exception), +               "They should have the same alignment requirements"); + +#if defined(__GNUC__) && !defined(_LP64) +static_assert(alignof(__cxa_exception) > alignof(std::max_align_t), +    "On 32 bit platforms __cxa_exception is expected to be over aligned."); +#endif + +void test_cxa_allocate_exception() { +  for (int i=0; i < 4096; ++i) { +    void* ptr = __cxa_allocate_exception(i); +    assert(ptr); +    assert(reinterpret_cast<std::size_t>(ptr) % 
required_alignment == 0); + __cxa_free_exception(ptr); + } +} + +void test_cxa_allocate_dependent_exception() { + for (int i=0; i < 100; ++i) { + void* ptr = __cxa_allocate_dependent_exception(); + assert(ptr); + assert(reinterpret_cast<std::size_t>(ptr) % required_alignment == 0); + __cxa_free_dependent_exception(ptr); + } +} + +int main() { + test_cxa_allocate_exception(); + test_cxa_allocate_dependent_exception(); +} Index: src/cxa_exception.cpp =================================================================== --- src/cxa_exception.cpp +++ src/cxa_exception.cpp @@ -17,6 +17,7 @@ #include <exception> // for std::terminate #include <cstdlib> // for malloc, free #include <cstring> // for memset +#include <cstddef> // for std::max_align_t #if !LIBCXXABI_HAS_NO_THREADS # include <pthread.h> // for fallback_malloc.ipp's mutexes #endif @@ -68,6 +69,16 @@ return cxa_exception_from_thrown_object(unwind_exception + 1 ); } +// Return true if the alignment requirements of __cxa_exception are greater +// than the fundamental alignment requirements. In this case we have to manually +// align pointers returned from malloc. +static +inline +bool +cxa_exception_must_be_overaligned() { + return alignof(__cxa_exception) > alignof(std::max_align_t); +} + static inline size_t @@ -106,16 +117,52 @@ #include "fallback_malloc.ipp" +// Same as std::align. This implementation is cribbed from libc++. 
+void* +align(size_t alignment, size_t size, void*& ptr, size_t& space) +{ + void* r = nullptr; + if (size <= space) + { + char* p1 = static_cast<char*>(ptr); + char* p2 = reinterpret_cast<char*>(reinterpret_cast<size_t>(p1 + (alignment - 1)) & -alignment); + size_t d = static_cast<size_t>(p2 - p1); + if (d <= space - size) + { + r = p2; + ptr = r; + space -= d; + } + } + return r; +} + // Allocate some memory from _somewhere_ static void *do_malloc(size_t size) { - void *ptr = std::malloc(size); + size_t aligned_size = size; + if (cxa_exception_must_be_overaligned()) { + aligned_size = (size + alignof(__cxa_exception) - alignof(void*)) + sizeof(void*); + } + + void *ptr = std::malloc(aligned_size); if (NULL == ptr) // if malloc fails, fall back to emergency stash - ptr = fallback_malloc(size); + ptr = fallback_malloc(aligned_size); + + if (cxa_exception_must_be_overaligned()) { + void *orig_ptr = ptr; + ptr = static_cast<char*>(ptr) + sizeof(void*); + align(alignof(__cxa_exception), size, ptr, aligned_size); + *(static_cast<void**>(ptr) - 1) = orig_ptr; + } + return ptr; } static void do_free(void *ptr) { - is_fallback_ptr(ptr) ? fallback_free(ptr) : std::free(ptr); + void* p = ptr; + if (cxa_exception_must_be_overaligned()) + p = *(static_cast<void**>(ptr) - 1); + is_fallback_ptr(p) ? fallback_free(p) : std::free(p); } /* @@ -156,7 +203,7 @@ // object. Zero-fill the object. If memory can't be allocated, call // std::terminate. Return a pointer to the memory to be used for the // user's exception object. -void * __cxa_allocate_exception (size_t thrown_size) throw() { +void * __cxa_allocate_exception(size_t thrown_size) throw() { size_t actual_size = cxa_exception_size_from_exception_thrown_size(thrown_size); __cxa_exception* exception_header = static_cast<__cxa_exception*>(do_malloc(actual_size)); if (NULL == exception_header)
_______________________________________________ cfe-commits mailing list cfe-commits@lists.llvm.org http://lists.llvm.org/cgi-bin/mailman/listinfo/cfe-commits