scudo: Move the management of the UseMemoryTagging bit out of the Primary. NFCI.

The primary and secondary allocators will need to share this bit,
so move the management of the bit to the combined allocator and
make useMemoryTagging() a free function.

Differential Revision: https://reviews.llvm.org/D93730
pcc committed Dec 23, 2020
1 parent e6b3db6 commit faac1c0
Showing 5 changed files with 27 additions and 33 deletions.
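Before the per-file diffs, here is a minimal, self-contained sketch of the pattern this commit lands: the tagging query becomes a free function keyed on the allocator config, combining a compile-time capability check with a runtime option bit owned by the combined allocator, so the primary and secondary can share it. This is illustrative only, not Scudo's actual headers; the trait name MaySupportMemoryTagging, the example configs, and the main() driver are assumptions made for the sake of a runnable example (Scudo's real capability check also consults the target architecture).

// Sketch of the free-function pattern introduced by this commit.
#include <cstdint>
#include <cstdio>

enum class OptionBit : unsigned { UseMemoryTagging = 0 };

struct Options {
  uint32_t Val;
  bool get(OptionBit Opt) const {
    return Val & (1U << static_cast<unsigned>(Opt));
  }
};

// Compile-time capability: can this allocator configuration ever tag memory?
template <typename Config> constexpr bool allocatorSupportsMemoryTagging() {
  return Config::MaySupportMemoryTagging;
}

// The free function: capability AND the runtime bit. The bit lives in one
// shared Options word, so the primary and secondary no longer need their own
// useMemoryTagging()/disableMemoryTagging() members.
template <typename Config> bool useMemoryTagging(Options O) {
  return allocatorSupportsMemoryTagging<Config>() &&
         O.get(OptionBit::UseMemoryTagging);
}

struct TaggingConfig { static const bool MaySupportMemoryTagging = true; };
struct PlainConfig { static const bool MaySupportMemoryTagging = false; };

int main() {
  Options O{1U << static_cast<unsigned>(OptionBit::UseMemoryTagging)};
  std::printf("tagging config: %d\n", useMemoryTagging<TaggingConfig>(O)); // 1
  std::printf("plain config:   %d\n", useMemoryTagging<PlainConfig>(O));   // 0
}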
30 changes: 16 additions & 14 deletions compiler-rt/lib/scudo/standalone/combined.h
@@ -100,7 +100,7 @@ class Allocator {
 
     // Reset tag to 0 as this chunk may have been previously used for a tagged
     // user allocation.
-    if (UNLIKELY(Allocator.useMemoryTagging()))
+    if (UNLIKELY(useMemoryTagging<Params>(Allocator.Primary.Options.load())))
       storeTags(reinterpret_cast<uptr>(Ptr),
                 reinterpret_cast<uptr>(Ptr) + sizeof(QuarantineBatch));
 
@@ -161,6 +161,9 @@ class Allocator {
       Primary.Options.set(OptionBit::DeallocTypeMismatch);
     if (getFlags()->delete_size_mismatch)
       Primary.Options.set(OptionBit::DeleteSizeMismatch);
+    if (allocatorSupportsMemoryTagging<Params>() &&
+        systemSupportsMemoryTagging())
+      Primary.Options.set(OptionBit::UseMemoryTagging);
     Primary.Options.set(OptionBit::UseOddEvenTags);
 
     QuarantineMaxChunkSize =
@@ -240,7 +243,7 @@ class Allocator {
   }
 
   ALWAYS_INLINE void *untagPointerMaybe(void *Ptr) {
-    if (Primary.SupportsMemoryTagging)
+    if (allocatorSupportsMemoryTagging<Params>())
       return reinterpret_cast<void *>(
           untagPointer(reinterpret_cast<uptr>(Ptr)));
     return Ptr;
@@ -367,7 +370,7 @@ class Allocator {
       //
       // When memory tagging is enabled, zeroing the contents is done as part of
       // setting the tag.
-      if (UNLIKELY(useMemoryTagging(Options))) {
+      if (UNLIKELY(useMemoryTagging<Params>(Options))) {
         uptr PrevUserPtr;
         Chunk::UnpackedHeader Header;
         const uptr BlockSize = PrimaryT::getSizeByClassId(ClassId);
@@ -594,7 +597,7 @@ class Allocator {
                    : BlockEnd - (reinterpret_cast<uptr>(OldPtr) + NewSize)) &
           Chunk::SizeOrUnusedBytesMask;
       Chunk::compareExchangeHeader(Cookie, OldPtr, &NewHeader, &OldHeader);
-      if (UNLIKELY(ClassId && useMemoryTagging(Options))) {
+      if (UNLIKELY(ClassId && useMemoryTagging<Params>(Options))) {
         resizeTaggedChunk(reinterpret_cast<uptr>(OldTaggedPtr) + OldSize,
                           reinterpret_cast<uptr>(OldTaggedPtr) + NewSize,
                           BlockEnd);
@@ -692,7 +695,7 @@ class Allocator {
       if (getChunkFromBlock(Block, &Chunk, &Header) &&
           Header.State == Chunk::State::Allocated) {
         uptr TaggedChunk = Chunk;
-        if (useMemoryTagging(Primary.Options.load()))
+        if (useMemoryTagging<Params>(Primary.Options.load()))
           TaggedChunk = loadTag(Chunk);
         Callback(TaggedChunk, getSize(reinterpret_cast<void *>(Chunk), &Header),
                  Arg);
@@ -783,15 +786,14 @@ class Allocator {
            Header.State == Chunk::State::Allocated;
   }
 
-  bool useMemoryTagging() const {
-    return useMemoryTagging(Primary.Options.load());
+  bool useMemoryTaggingTestOnly() const {
+    return useMemoryTagging<Params>(Primary.Options.load());
   }
-  static bool useMemoryTagging(Options Options) {
-    return PrimaryT::useMemoryTagging(Options);
+  void disableMemoryTagging() {
+    if (allocatorSupportsMemoryTagging<Params>())
+      Primary.Options.clear(OptionBit::UseMemoryTagging);
   }
 
-  void disableMemoryTagging() { Primary.disableMemoryTagging(); }
-
   void setTrackAllocationStacks(bool Track) {
     initThreadMaybe();
     if (Track)
@@ -823,7 +825,7 @@ class Allocator {
                            const char *MemoryTags, uintptr_t MemoryAddr,
                            size_t MemorySize) {
     *ErrorInfo = {};
-    if (!PrimaryT::SupportsMemoryTagging ||
+    if (!allocatorSupportsMemoryTagging<Params>() ||
         MemoryAddr + MemorySize < MemoryAddr)
       return;
 
@@ -942,7 +944,7 @@ class Allocator {
 
   static_assert(MinAlignment >= sizeof(Chunk::PackedHeader),
                 "Minimal alignment must at least cover a chunk header.");
-  static_assert(!PrimaryT::SupportsMemoryTagging ||
+  static_assert(!allocatorSupportsMemoryTagging<Params>() ||
                     MinAlignment >= archMemoryTagGranuleSize(),
                 "");
 
@@ -1037,7 +1039,7 @@ class Allocator {
   void quarantineOrDeallocateChunk(Options Options, void *Ptr,
                                    Chunk::UnpackedHeader *Header, uptr Size) {
     Chunk::UnpackedHeader NewHeader = *Header;
-    if (UNLIKELY(NewHeader.ClassId && useMemoryTagging(Options))) {
+    if (UNLIKELY(NewHeader.ClassId && useMemoryTagging<Params>(Options))) {
       u8 PrevTag = extractTag(loadTag(reinterpret_cast<uptr>(Ptr)));
       if (!TSDRegistry.getDisableMemInit()) {
         uptr TaggedBegin, TaggedEnd;
6 changes: 6 additions & 0 deletions compiler-rt/lib/scudo/standalone/options.h
@@ -11,6 +11,7 @@
 
 #include "atomic_helpers.h"
 #include "common.h"
+#include "memtag.h"
 
 namespace scudo {
 
@@ -36,6 +37,11 @@ struct Options {
   }
 };
 
+template <typename Config> bool useMemoryTagging(Options Options) {
+  return allocatorSupportsMemoryTagging<Config>() &&
+         Options.get(OptionBit::UseMemoryTagging);
+}
+
 struct AtomicOptions {
   atomic_u32 Val;
 
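With useMemoryTagging() now a config-keyed free function in options.h, a call site loads the shared atomic options word once and passes it in, as the combined.h hunks above show. A minimal sketch of the resulting call-site shape, reusing names from the diff (Params, Primary, loadTag; the Cached variable name is illustrative):

  Options Cached = Primary.Options.load();  // one atomic load of the shared word
  if (useMemoryTagging<Params>(Cached))     // compile-time capability && runtime bit
    TaggedChunk = loadTag(Chunk);           // cf. iterateOverChunks above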
4 changes: 0 additions & 4 deletions compiler-rt/lib/scudo/standalone/primary32.h
@@ -50,7 +50,6 @@ template <typename Config> class SizeClassAllocator32 {
   typedef SizeClassAllocator32<Config> ThisT;
   typedef SizeClassAllocatorLocalCache<ThisT> CacheT;
   typedef typename CacheT::TransferBatch TransferBatch;
-  static const bool SupportsMemoryTagging = false;
 
   static uptr getSizeByClassId(uptr ClassId) {
     return (ClassId == SizeClassMap::BatchClassId)
@@ -216,9 +215,6 @@ template <typename Config> class SizeClassAllocator32 {
     return TotalReleasedBytes;
   }
 
-  static bool useMemoryTagging(UNUSED Options Options) { return false; }
-  void disableMemoryTagging() {}
-
   const char *getRegionInfoArrayAddress() const { return nullptr; }
   static uptr getRegionInfoArraySize() { return 0; }
 
12 changes: 1 addition & 11 deletions compiler-rt/lib/scudo/standalone/primary64.h
@@ -46,8 +46,6 @@ template <typename Config> class SizeClassAllocator64 {
   typedef SizeClassAllocator64<Config> ThisT;
   typedef SizeClassAllocatorLocalCache<ThisT> CacheT;
   typedef typename CacheT::TransferBatch TransferBatch;
-  static const bool SupportsMemoryTagging =
-      allocatorSupportsMemoryTagging<Config>();
 
   static uptr getSizeByClassId(uptr ClassId) {
     return (ClassId == SizeClassMap::BatchClassId)
@@ -76,9 +74,6 @@ template <typename Config> class SizeClassAllocator64 {
       Region->ReleaseInfo.LastReleaseAtNs = Time;
     }
     setOption(Option::ReleaseInterval, static_cast<sptr>(ReleaseToOsInterval));
-
-    if (SupportsMemoryTagging && systemSupportsMemoryTagging())
-      Options.set(OptionBit::UseMemoryTagging);
   }
   void init(s32 ReleaseToOsInterval) {
     memset(this, 0, sizeof(*this));
@@ -193,11 +188,6 @@ template <typename Config> class SizeClassAllocator64 {
     return TotalReleasedBytes;
   }
 
-  static bool useMemoryTagging(Options Options) {
-    return SupportsMemoryTagging && Options.get(OptionBit::UseMemoryTagging);
-  }
-  void disableMemoryTagging() { Options.clear(OptionBit::UseMemoryTagging); }
-
   const char *getRegionInfoArrayAddress() const {
     return reinterpret_cast<const char *>(RegionInfoArray);
   }
@@ -335,7 +325,7 @@ template <typename Config> class SizeClassAllocator64 {
     if (!map(reinterpret_cast<void *>(RegionBeg + MappedUser), UserMapSize,
              "scudo:primary",
              MAP_ALLOWNOMEM | MAP_RESIZABLE |
-                 (useMemoryTagging(Options.load()) ? MAP_MEMTAG : 0),
+                 (useMemoryTagging<Config>(Options.load()) ? MAP_MEMTAG : 0),
              &Region->Data))
       return nullptr;
     Region->MappedUser += UserMapSize;
8 changes: 4 additions & 4 deletions compiler-rt/lib/scudo/standalone/tests/combined_test.cpp
@@ -47,7 +47,7 @@ bool isPrimaryAllocation(scudo::uptr Size, scudo::uptr Alignment) {
 template <class AllocatorT>
 bool isTaggedAllocation(AllocatorT *Allocator, scudo::uptr Size,
                         scudo::uptr Alignment) {
-  return Allocator->useMemoryTagging() &&
+  return Allocator->useMemoryTaggingTestOnly() &&
          scudo::systemDetectsMemoryTagFaultsTestOnly() &&
          isPrimaryAllocation<AllocatorT>(Size, Alignment);
 }
@@ -162,7 +162,7 @@ template <class Config> static void testAllocator() {
       for (scudo::uptr I = 0; I < Size; I++) {
         unsigned char V = (reinterpret_cast<unsigned char *>(P))[I];
         if (isPrimaryAllocation<AllocatorT>(Size, 1U << MinAlignLog) &&
-            !Allocator->useMemoryTagging())
+            !Allocator->useMemoryTaggingTestOnly())
           ASSERT_EQ(V, scudo::PatternFillByte);
         else
           ASSERT_TRUE(V == scudo::PatternFillByte || V == 0);
@@ -248,7 +248,7 @@ template <class Config> static void testAllocator() {
 
   Allocator->releaseToOS();
 
-  if (Allocator->useMemoryTagging() &&
+  if (Allocator->useMemoryTaggingTestOnly() &&
       scudo::systemDetectsMemoryTagFaultsTestOnly()) {
     // Check that use-after-free is detected.
     for (scudo::uptr SizeLog = 0U; SizeLog <= 20U; SizeLog++) {
@@ -493,7 +493,7 @@ TEST(ScudoCombinedTest, OddEven) {
   using SizeClassMap = AllocatorT::PrimaryT::SizeClassMap;
   auto Allocator = std::unique_ptr<AllocatorT>(new AllocatorT());
 
-  if (!Allocator->useMemoryTagging())
+  if (!Allocator->useMemoryTaggingTestOnly())
     return;
 
   auto CheckOddEven = [](scudo::uptr P1, scudo::uptr P2) {
