llvmbot wrote:
<!--LLVM PR SUMMARY COMMENT-->

@llvm/pr-subscribers-compiler-rt-sanitizer

Author: Vitaly Buka (vitalybuka)

<details>
<summary>Changes</summary>

For such threads (threads we could not attach to) we have no registers, so there is no exact stack range and no guarantee that the stack is mapped at all. To avoid crashes on unmapped memory, `MemCpyAccessible` copies the interesting range into a temporary buffer, and we search for pointers there.

---
Full diff: https://github.com/llvm/llvm-project/pull/112807.diff

2 Files Affected:

- (modified) compiler-rt/lib/lsan/lsan_common.cpp (+40)
- (modified) compiler-rt/lib/lsan/lsan_flags.inc (+2)


``````````diff
diff --git a/compiler-rt/lib/lsan/lsan_common.cpp b/compiler-rt/lib/lsan/lsan_common.cpp
index 9aed36b96ce929..05f1edae0f05c1 100644
--- a/compiler-rt/lib/lsan/lsan_common.cpp
+++ b/compiler-rt/lib/lsan/lsan_common.cpp
@@ -293,6 +293,27 @@ struct DirectMemoryAccessor {
   void Init(uptr begin, uptr end) {};
   void *LoadPtr(uptr p) const { return *reinterpret_cast<void **>(p); }
 };
+
+struct CopyLoader {
+  void Init(uptr begin, uptr end) {
+    buffer.clear();
+    buffer.resize(end - begin);
+    offset = reinterpret_cast<uptr>(buffer.data()) - begin;
+
+    // Need a partial data?
+    MemCpyAccessible(buffer.data(), reinterpret_cast<void *>(begin),
+                     buffer.size());
+  };
+  void *LoadPtr(uptr p) const {
+    CHECK_LE(p + offset + sizeof(void *),
+             reinterpret_cast<uptr>(buffer.data() + buffer.size()));
+    return *reinterpret_cast<void **>(p + offset);
+  }
+
+ private:
+  uptr offset;
+  InternalMmapVector<char> buffer;
+};
 }  // namespace
 
 // Scans the memory range, looking for byte patterns that point into allocator
@@ -535,6 +556,7 @@ static void ProcessThread(tid_t os_id, uptr sp,
 static void ProcessThreads(SuspendedThreadsList const &suspended_threads,
                            Frontier *frontier, tid_t caller_tid,
                            uptr caller_sp) {
+  InternalMmapVector<tid_t> done_threads;
   InternalMmapVector<uptr> registers;
   InternalMmapVector<Range> extra_ranges;
   for (uptr i = 0; i < suspended_threads.ThreadCount(); i++) {
@@ -559,6 +581,24 @@ static void ProcessThreads(SuspendedThreadsList const &suspended_threads,
 
     DirectMemoryAccessor accessor;
     ProcessThread(os_id, sp, registers, extra_ranges, frontier, accessor);
+    done_threads.push_back(os_id);
+  }
+
+  if (flags()->use_detached) {
+    CopyLoader accessor;
+    InternalMmapVector<tid_t> known_threads;
+    GetRunningThreadsLocked(&known_threads);
+    Sort(done_threads.data(), done_threads.size());
+    for (tid_t os_id : known_threads) {
+      registers.clear();
+      extra_ranges.clear();
+
+      uptr i = InternalLowerBound(done_threads, os_id);
+      if (i >= done_threads.size() || done_threads[i] != os_id) {
+        uptr sp = (os_id == caller_tid) ? caller_sp : 0;
+        ProcessThread(os_id, sp, registers, extra_ranges, frontier, accessor);
+      }
+    }
   }
 
   // Add pointers reachable from ThreadContexts
diff --git a/compiler-rt/lib/lsan/lsan_flags.inc b/compiler-rt/lib/lsan/lsan_flags.inc
index c97b021ba5c02f..09d759302fdd5d 100644
--- a/compiler-rt/lib/lsan/lsan_flags.inc
+++ b/compiler-rt/lib/lsan/lsan_flags.inc
@@ -41,6 +41,8 @@ LSAN_FLAG(bool, use_ld_allocations, true,
 LSAN_FLAG(bool, use_unaligned, false, "Consider unaligned pointers valid.")
 LSAN_FLAG(bool, use_poisoned, false,
           "Consider pointers found in poisoned memory to be valid.")
+LSAN_FLAG(bool, use_detached, false,
+          "Scan threads even attaching to them failed.")
 LSAN_FLAG(bool, log_pointers, false, "Debug logging")
 LSAN_FLAG(bool, log_threads, false, "Debug logging")
 LSAN_FLAG(int, tries, 1, "Debug option to repeat leak checking multiple times")
``````````

</details>

https://github.com/llvm/llvm-project/pull/112807
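To make the copy-then-scan idea above concrete, here is a minimal standalone sketch, not the patch itself: instead of dereferencing addresses in the target range directly (as `DirectMemoryAccessor` does), the scanner first snapshots the range into a private buffer and then reads candidate pointers out of the snapshot. The names below (`CopyAccessor`, the `std::vector` buffer) and the plain `memcpy` are illustrative assumptions; the real patch uses `MemCpyAccessible`, which tolerates unmapped pages, and the sanitizer's `InternalMmapVector` rather than STL containers.

```cpp
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

// Illustrative stand-in for the patch's CopyLoader (names are made up):
// snapshot a memory range into a private buffer, then serve pointer-sized
// loads from the snapshot instead of touching the original addresses again.
// NOTE: plain memcpy requires the source range to be mapped; the real patch
// relies on MemCpyAccessible precisely because that may not be true here.
struct CopyAccessor {
  void Init(uintptr_t begin, uintptr_t end) {
    begin_ = begin;
    buffer_.assign(end - begin, 0);
    std::memcpy(buffer_.data(), reinterpret_cast<const void *>(begin),
                buffer_.size());
  }
  void *LoadPtr(uintptr_t p) const {
    // Translate the original address into an offset within the snapshot and
    // read one pointer-sized value from there.
    void *result = nullptr;
    std::memcpy(&result, buffer_.data() + (p - begin_), sizeof(result));
    return result;
  }

 private:
  uintptr_t begin_ = 0;
  std::vector<unsigned char> buffer_;
};

int main() {
  // A fake "stack" that happens to contain one pointer into our data.
  int object = 42;
  void *fake_stack[4] = {nullptr, &object, nullptr, nullptr};

  uintptr_t begin = reinterpret_cast<uintptr_t>(&fake_stack[0]);
  uintptr_t end = begin + sizeof(fake_stack);

  CopyAccessor accessor;
  accessor.Init(begin, end);

  // Scan the range word by word, the way the leak scanner walks a stack.
  for (uintptr_t p = begin; p + sizeof(void *) <= end; p += sizeof(void *)) {
    if (accessor.LoadPtr(p) == &object)
      std::printf("pointer to object found at offset %zu\n",
                  static_cast<size_t>(p - begin));
  }
  return 0;
}
```

In the PR, this copying accessor is only used for threads that were not handled by the normal attach-and-scan path, and only when the new `use_detached` flag is enabled (for standalone LSan, e.g. `LSAN_OPTIONS=use_detached=1`), so the default behavior is unchanged.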