From d59370ddd50131af90ae53b78b025fc99204d645 Mon Sep 17 00:00:00 2001
From: Chia-hung Duan
Date: Thu, 29 Feb 2024 22:54:46 +0000
Subject: [PATCH] [scudo] Split the code paths in quarantineOrDeallocateChunk()

Move block untagging logic to another function and sink untagged pointer
to quarantineOrDeallocateChunk().

---
 compiler-rt/lib/scudo/standalone/combined.h | 78 ++++++++++++++-------
 1 file changed, 51 insertions(+), 27 deletions(-)

diff --git a/compiler-rt/lib/scudo/standalone/combined.h b/compiler-rt/lib/scudo/standalone/combined.h
index 55a7ffc9b6d6cb..8ed4721d23d561 100644
--- a/compiler-rt/lib/scudo/standalone/combined.h
+++ b/compiler-rt/lib/scudo/standalone/combined.h
@@ -468,7 +468,8 @@ class Allocator {
         reportDeleteSizeMismatch(Ptr, DeleteSize, Size);
     }
 
-    quarantineOrDeallocateChunk(Options, TaggedPtr, &Header, Size);
+    quarantineOrDeallocateChunk(Options, TaggedPtr, /*HeaderTaggedPtr=*/Ptr,
+                                &Header, Size);
   }
 
   void *reallocate(void *OldPtr, uptr NewSize, uptr Alignment = MinAlignment) {
@@ -567,7 +568,8 @@ class Allocator {
     void *NewPtr = allocate(NewSize, Chunk::Origin::Malloc, Alignment);
     if (LIKELY(NewPtr)) {
       memcpy(NewPtr, OldTaggedPtr, Min(NewSize, OldSize));
-      quarantineOrDeallocateChunk(Options, OldTaggedPtr, &Header, OldSize);
+      quarantineOrDeallocateChunk(Options, OldTaggedPtr,
+                                  /*HeaderTaggedPtr=*/OldPtr, &Header, OldSize);
     }
     return NewPtr;
   }
@@ -1199,9 +1201,10 @@ class Allocator {
   }
 
   void quarantineOrDeallocateChunk(const Options &Options, void *TaggedPtr,
+                                   void *HeaderTaggedPtr,
                                    Chunk::UnpackedHeader *Header,
                                    uptr Size) NO_THREAD_SAFETY_ANALYSIS {
-    void *Ptr = getHeaderTaggedPointer(TaggedPtr);
+    void *Ptr = HeaderTaggedPtr;
     // If the quarantine is disabled, the actual size of a chunk is 0 or larger
     // than the maximum allowed, we return a chunk directly to the backend.
     // This purposefully underflows for Size == 0.
@@ -1212,31 +1215,23 @@
       Header->State = Chunk::State::Available;
     else
       Header->State = Chunk::State::Quarantined;
-    Header->OriginOrWasZeroed = useMemoryTagging<Config>(Options) &&
-                                Header->ClassId &&
-                                !TSDRegistry.getDisableMemInit();
-    Chunk::storeHeader(Cookie, Ptr, Header);
 
+    void *BlockBegin;
     if (UNLIKELY(useMemoryTagging<Config>(Options))) {
-      u8 PrevTag = extractTag(reinterpret_cast<uptr>(TaggedPtr));
-      storeDeallocationStackMaybe(Options, Ptr, PrevTag, Size);
-      if (Header->ClassId) {
-        if (!TSDRegistry.getDisableMemInit()) {
-          uptr TaggedBegin, TaggedEnd;
-          const uptr OddEvenMask = computeOddEvenMaskForPointerMaybe(
-              Options, reinterpret_cast<uptr>(getBlockBegin(Ptr, Header)),
-              Header->ClassId);
-          // Exclude the previous tag so that immediate use after free is
-          // detected 100% of the time.
-          setRandomTag(Ptr, Size, OddEvenMask | (1UL << PrevTag), &TaggedBegin,
-                       &TaggedEnd);
-        }
-      }
+      Header->OriginOrWasZeroed =
+          Header->ClassId && !TSDRegistry.getDisableMemInit();
+      BlockBegin = unTagBlock(Options, TaggedPtr, HeaderTaggedPtr, Header, Size,
+                              BypassQuarantine);
+    } else {
+      Header->OriginOrWasZeroed = 0U;
+      if (BypassQuarantine && allocatorSupportsMemoryTagging<Config>())
+        Ptr = untagPointer(Ptr);
+      BlockBegin = getBlockBegin(Ptr, Header);
     }
+
+    Chunk::storeHeader(Cookie, Ptr, Header);
+
     if (BypassQuarantine) {
-      if (allocatorSupportsMemoryTagging<Config>())
-        Ptr = untagPointer(Ptr);
-      void *BlockBegin = getBlockBegin(Ptr, Header);
       const uptr ClassId = Header->ClassId;
       if (LIKELY(ClassId)) {
         bool CacheDrained;
@@ -1251,9 +1246,6 @@ class Allocator {
         if (CacheDrained)
           Primary.tryReleaseToOS(ClassId, ReleaseToOS::Normal);
       } else {
-        if (UNLIKELY(useMemoryTagging<Config>(Options)))
-          storeTags(reinterpret_cast<uptr>(BlockBegin),
-                    reinterpret_cast<uptr>(Ptr));
         Secondary.deallocate(Options, BlockBegin);
       }
     } else {
@@ -1263,6 +1255,38 @@
     }
   }
 
+  NOINLINE void *unTagBlock(const Options &Options, void *TaggedPtr,
+                            void *HeaderTaggedPtr,
+                            Chunk::UnpackedHeader *Header, const uptr Size,
+                            bool BypassQuarantine) {
+    DCHECK(useMemoryTagging<Config>(Options));
+    void *Ptr = HeaderTaggedPtr;
+
+    const u8 PrevTag = extractTag(reinterpret_cast<uptr>(TaggedPtr));
+    storeDeallocationStackMaybe(Options, Ptr, PrevTag, Size);
+    if (Header->ClassId) {
+      if (!TSDRegistry.getDisableMemInit()) {
+        uptr TaggedBegin, TaggedEnd;
+        const uptr OddEvenMask = computeOddEvenMaskForPointerMaybe(
+            Options, reinterpret_cast<uptr>(getBlockBegin(Ptr, Header)),
+            Header->ClassId);
+        // Exclude the previous tag so that immediate use after free is
+        // detected 100% of the time.
+        setRandomTag(Ptr, Size, OddEvenMask | (1UL << PrevTag), &TaggedBegin,
+                     &TaggedEnd);
+      }
+    }
+
+    Ptr = untagPointer(Ptr);
+    void *BlockBegin = getBlockBegin(Ptr, Header);
+    if (BypassQuarantine && !Header->ClassId) {
+      storeTags(reinterpret_cast<uptr>(BlockBegin),
+                reinterpret_cast<uptr>(Ptr));
+    }
+
+    return BlockBegin;
+  }
+
   bool getChunkFromBlock(uptr Block, uptr *Chunk,
                          Chunk::UnpackedHeader *Header) {
     *Chunk =
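Below is a minimal standalone model of the two tagging primitives that unTagBlock() relies on, included to illustrate what the preserved comment means. It is only a sketch: the tag-bit layout constants, chooseTag(), and the odd/even mask value are illustrative stand-ins, not scudo's implementation. It demonstrates why OR-ing `1UL << PrevTag` into the exclusion mask makes immediate use-after-free detection deterministic, and what untagPointer() strips before getBlockBegin() computes the block address.

// Toy model of MTE-style pointer tagging (illustrative constants, not
// scudo's): the logical tag is assumed to live in address bits [59:56].
#include <cassert>
#include <cstdint>
#include <random>

constexpr uint64_t TagShift = 56;
constexpr uint64_t TagMask = 0xfULL << TagShift;

static uint8_t extractTag(uint64_t Addr) {
  return static_cast<uint8_t>((Addr & TagMask) >> TagShift);
}
static uint64_t untagPointer(uint64_t Addr) { return Addr & ~TagMask; }

// Hypothetical stand-in for the tag choice inside setRandomTag(): pick a
// 4-bit tag whose bit is clear in ExcludeMask.
static uint8_t chooseTag(uint16_t ExcludeMask, std::mt19937 &Rng) {
  assert(ExcludeMask != 0xffff && "at least one tag must remain usable");
  uint8_t Tag;
  do
    Tag = static_cast<uint8_t>(Rng() & 0xf);
  while (ExcludeMask & (1u << Tag));
  return Tag;
}

int main() {
  std::mt19937 Rng(42);
  const uint64_t TaggedPtr = (7ULL << TagShift) | 0x1000; // tag 7
  const uint8_t PrevTag = extractTag(TaggedPtr);

  for (int I = 0; I < 1000; ++I) {
    // Mirrors `OddEvenMask | (1UL << PrevTag)`: the odd/even policy mask
    // plus the freed chunk's previous tag, so the retagged memory can never
    // keep the tag a dangling pointer still carries.
    const uint16_t OddEvenMask = 0x5555; // e.g. rule out all even tags
    assert(chooseTag(OddEvenMask | (1u << PrevTag), Rng) != PrevTag);
  }

  // The quarantine-bypass path computes the block address from the untagged
  // pointer, as the restructured quarantineOrDeallocateChunk() does.
  assert(untagPointer(TaggedPtr) == 0x1000);
  return 0;
}

With PrevTag's bit set in the mask, chooseTag() can never return the freed chunk's old tag, which is exactly the "detected 100% of the time" guarantee the comment carried over into unTagBlock() claims.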