Skip to content

Commit 8dd9615

Browse files
authored
Revert "[scudo] Make local cache be agnostic to the type of node in freelist (#67379)" (#68626)
This reverts commit b9c6737.
1 parent 540a1a6 commit 8dd9615

File tree

6 files changed

+118
-167
lines changed

6 files changed

+118
-167
lines changed

compiler-rt/lib/scudo/standalone/CMakeLists.txt

-1
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,6 @@ if(ANDROID)
5656
endif()
5757

5858
set(SCUDO_HEADERS
59-
allocator_common.h
6059
allocator_config.h
6160
atomic_helpers.h
6261
bytemap.h

compiler-rt/lib/scudo/standalone/allocator_common.h

-85
This file was deleted.

compiler-rt/lib/scudo/standalone/local_cache.h

+91-19
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,74 @@ template <class SizeClassAllocator> struct SizeClassAllocatorLocalCache {
2222
typedef typename SizeClassAllocator::SizeClassMap SizeClassMap;
2323
typedef typename SizeClassAllocator::CompactPtrT CompactPtrT;
2424

25+
struct TransferBatch {
26+
static const u16 MaxNumCached = SizeClassMap::MaxNumCachedHint;
27+
void setFromArray(CompactPtrT *Array, u16 N) {
28+
DCHECK_LE(N, MaxNumCached);
29+
Count = N;
30+
memcpy(Batch, Array, sizeof(Batch[0]) * Count);
31+
}
32+
void appendFromArray(CompactPtrT *Array, u16 N) {
33+
DCHECK_LE(N, MaxNumCached - Count);
34+
memcpy(Batch + Count, Array, sizeof(Batch[0]) * N);
35+
// u16 will be promoted to int by arithmetic type conversion.
36+
Count = static_cast<u16>(Count + N);
37+
}
38+
void appendFromTransferBatch(TransferBatch *B, u16 N) {
39+
DCHECK_LE(N, MaxNumCached - Count);
40+
DCHECK_GE(B->Count, N);
41+
// Append from the back of `B`.
42+
memcpy(Batch + Count, B->Batch + (B->Count - N), sizeof(Batch[0]) * N);
43+
// u16 will be promoted to int by arithmetic type conversion.
44+
Count = static_cast<u16>(Count + N);
45+
B->Count = static_cast<u16>(B->Count - N);
46+
}
47+
void clear() { Count = 0; }
48+
void add(CompactPtrT P) {
49+
DCHECK_LT(Count, MaxNumCached);
50+
Batch[Count++] = P;
51+
}
52+
void copyToArray(CompactPtrT *Array) const {
53+
memcpy(Array, Batch, sizeof(Batch[0]) * Count);
54+
}
55+
u16 getCount() const { return Count; }
56+
bool isEmpty() const { return Count == 0U; }
57+
CompactPtrT get(u16 I) const {
58+
DCHECK_LE(I, Count);
59+
return Batch[I];
60+
}
61+
static u16 getMaxCached(uptr Size) {
62+
return Min(MaxNumCached, SizeClassMap::getMaxCachedHint(Size));
63+
}
64+
TransferBatch *Next;
65+
66+
private:
67+
CompactPtrT Batch[MaxNumCached];
68+
u16 Count;
69+
};
70+
71+
// A BatchGroup is used to collect blocks. Each group has a group id to
72+
// identify the group kind of contained blocks.
73+
struct BatchGroup {
74+
// `Next` is used by IntrusiveList.
75+
BatchGroup *Next;
76+
// The compact base address of each group
77+
uptr CompactPtrGroupBase;
78+
// Cache value of TransferBatch::getMaxCached()
79+
u16 MaxCachedPerBatch;
80+
// Number of blocks pushed into this group. This is an increment-only
81+
// counter.
82+
uptr PushedBlocks;
83+
// This is used to track how many bytes are not in-use since last time we
84+
// tried to release pages.
85+
uptr BytesInBGAtLastCheckpoint;
86+
// Blocks are managed by TransferBatch in a list.
87+
SinglyLinkedList<TransferBatch> Batches;
88+
};
89+
90+
static_assert(sizeof(BatchGroup) <= sizeof(TransferBatch),
91+
"BatchGroup uses the same class size as TransferBatch");
92+
2593
void init(GlobalStats *S, SizeClassAllocator *A) {
2694
DCHECK(isEmpty());
2795
Stats.init();
@@ -83,7 +151,7 @@ template <class SizeClassAllocator> struct SizeClassAllocatorLocalCache {
83151
}
84152

85153
void drain() {
86-
// Drain BatchClassId last as it may be needed while draining normal blocks.
154+
// Drain BatchClassId last as createBatch can refill it.
87155
for (uptr I = 0; I < NumClasses; ++I) {
88156
if (I == BatchClassId)
89157
continue;
@@ -95,11 +163,19 @@ template <class SizeClassAllocator> struct SizeClassAllocatorLocalCache {
95163
DCHECK(isEmpty());
96164
}
97165

98-
void *getBatchClassBlock() {
99-
void *B = allocate(BatchClassId);
166+
TransferBatch *createBatch(uptr ClassId, void *B) {
167+
if (ClassId != BatchClassId)
168+
B = allocate(BatchClassId);
100169
if (UNLIKELY(!B))
101170
reportOutOfMemory(SizeClassAllocator::getSizeByClassId(BatchClassId));
102-
return B;
171+
return reinterpret_cast<TransferBatch *>(B);
172+
}
173+
174+
BatchGroup *createGroup() {
175+
void *Ptr = allocate(BatchClassId);
176+
if (UNLIKELY(!Ptr))
177+
reportOutOfMemory(SizeClassAllocator::getSizeByClassId(BatchClassId));
178+
return reinterpret_cast<BatchGroup *>(Ptr);
103179
}
104180

105181
LocalStats &getStats() { return Stats; }
@@ -127,11 +203,6 @@ template <class SizeClassAllocator> struct SizeClassAllocatorLocalCache {
127203
Str->append(" No block is cached.\n");
128204
}
129205

130-
static u16 getMaxCached(uptr Size) {
131-
return Min(SizeClassMap::MaxNumCachedHint,
132-
SizeClassMap::getMaxCachedHint(Size));
133-
}
134-
135206
private:
136207
static const uptr NumClasses = SizeClassMap::NumClasses;
137208
static const uptr BatchClassId = SizeClassMap::BatchClassId;
@@ -140,7 +211,7 @@ template <class SizeClassAllocator> struct SizeClassAllocatorLocalCache {
140211
u16 MaxCount;
141212
// Note: ClassSize is zero for the transfer batch.
142213
uptr ClassSize;
143-
CompactPtrT Chunks[2 * SizeClassMap::MaxNumCachedHint];
214+
CompactPtrT Chunks[2 * TransferBatch::MaxNumCached];
144215
};
145216
PerClass PerClassArray[NumClasses] = {};
146217
LocalStats Stats;
@@ -157,7 +228,7 @@ template <class SizeClassAllocator> struct SizeClassAllocatorLocalCache {
157228
for (uptr I = 0; I < NumClasses; I++) {
158229
PerClass *P = &PerClassArray[I];
159230
const uptr Size = SizeClassAllocator::getSizeByClassId(I);
160-
P->MaxCount = static_cast<u16>(2 * getMaxCached(Size));
231+
P->MaxCount = static_cast<u16>(2 * TransferBatch::getMaxCached(Size));
161232
if (I != BatchClassId) {
162233
P->ClassSize = Size;
163234
} else {
@@ -175,14 +246,15 @@ template <class SizeClassAllocator> struct SizeClassAllocatorLocalCache {
175246

176247
NOINLINE bool refill(PerClass *C, uptr ClassId) {
177248
initCacheMaybe(C);
178-
179-
// TODO(chiahungduan): Pass the max number cached for each size class.
180-
const u16 NumBlocksRefilled =
181-
Allocator->popBlocks(this, ClassId, C->Chunks);
182-
DCHECK_LE(NumBlocksRefilled,
183-
getMaxCached(SizeClassAllocator::getSizeByClassId(ClassId)));
184-
C->Count += NumBlocksRefilled;
185-
return NumBlocksRefilled != 0;
249+
TransferBatch *B = Allocator->popBatch(this, ClassId);
250+
if (UNLIKELY(!B))
251+
return false;
252+
DCHECK_GT(B->getCount(), 0);
253+
C->Count = B->getCount();
254+
B->copyToArray(C->Chunks);
255+
B->clear();
256+
destroyBatch(ClassId, B);
257+
return true;
186258
}
187259

188260
NOINLINE void drain(PerClass *C, uptr ClassId) {

compiler-rt/lib/scudo/standalone/primary32.h

+12-29
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
#ifndef SCUDO_PRIMARY32_H_
1010
#define SCUDO_PRIMARY32_H_
1111

12-
#include "allocator_common.h"
1312
#include "bytemap.h"
1413
#include "common.h"
1514
#include "list.h"
@@ -54,11 +53,8 @@ template <typename Config> class SizeClassAllocator32 {
5453
"");
5554
typedef SizeClassAllocator32<Config> ThisT;
5655
typedef SizeClassAllocatorLocalCache<ThisT> CacheT;
57-
typedef TransferBatch<ThisT> TransferBatch;
58-
typedef BatchGroup<ThisT> BatchGroup;
59-
60-
static_assert(sizeof(BatchGroup) <= sizeof(TransferBatch),
61-
"BatchGroup uses the same class size as TransferBatch");
56+
typedef typename CacheT::TransferBatch TransferBatch;
57+
typedef typename CacheT::BatchGroup BatchGroup;
6258

6359
static uptr getSizeByClassId(uptr ClassId) {
6460
return (ClassId == SizeClassMap::BatchClassId)
@@ -191,21 +187,6 @@ template <typename Config> class SizeClassAllocator32 {
191187
return BlockSize > PageSize;
192188
}
193189

194-
u16 popBlocks(CacheT *C, uptr ClassId, CompactPtrT *ToArray) {
195-
TransferBatch *B = popBatch(C, ClassId);
196-
if (!B)
197-
return 0;
198-
199-
const u16 Count = B->getCount();
200-
DCHECK_GT(Count, 0U);
201-
B->moveToArray(ToArray);
202-
203-
if (ClassId != SizeClassMap::BatchClassId)
204-
C->deallocate(SizeClassMap::BatchClassId, B);
205-
206-
return Count;
207-
}
208-
209190
TransferBatch *popBatch(CacheT *C, uptr ClassId) {
210191
DCHECK_LT(ClassId, NumClasses);
211192
SizeClassInfo *Sci = getSizeClassInfo(ClassId);
@@ -539,8 +520,8 @@ template <typename Config> class SizeClassAllocator32 {
539520
// from `CreateGroup` in `pushBlocksImpl`
540521
BG->PushedBlocks = 1;
541522
BG->BytesInBGAtLastCheckpoint = 0;
542-
BG->MaxCachedPerBatch =
543-
CacheT::getMaxCached(getSizeByClassId(SizeClassMap::BatchClassId));
523+
BG->MaxCachedPerBatch = TransferBatch::getMaxCached(
524+
getSizeByClassId(SizeClassMap::BatchClassId));
544525

545526
Sci->FreeListInfo.BlockList.push_front(BG);
546527
}
@@ -619,17 +600,17 @@ template <typename Config> class SizeClassAllocator32 {
619600
DCHECK_GT(Size, 0U);
620601

621602
auto CreateGroup = [&](uptr CompactPtrGroupBase) {
622-
BatchGroup *BG = reinterpret_cast<BatchGroup *>(C->getBatchClassBlock());
603+
BatchGroup *BG = C->createGroup();
623604
BG->Batches.clear();
624-
TransferBatch *TB =
625-
reinterpret_cast<TransferBatch *>(C->getBatchClassBlock());
605+
TransferBatch *TB = C->createBatch(ClassId, nullptr);
626606
TB->clear();
627607

628608
BG->CompactPtrGroupBase = CompactPtrGroupBase;
629609
BG->Batches.push_front(TB);
630610
BG->PushedBlocks = 0;
631611
BG->BytesInBGAtLastCheckpoint = 0;
632-
BG->MaxCachedPerBatch = CacheT::getMaxCached(getSizeByClassId(ClassId));
612+
BG->MaxCachedPerBatch =
613+
TransferBatch::getMaxCached(getSizeByClassId(ClassId));
633614

634615
return BG;
635616
};
@@ -644,7 +625,9 @@ template <typename Config> class SizeClassAllocator32 {
644625
u16 UnusedSlots =
645626
static_cast<u16>(BG->MaxCachedPerBatch - CurBatch->getCount());
646627
if (UnusedSlots == 0) {
647-
CurBatch = reinterpret_cast<TransferBatch *>(C->getBatchClassBlock());
628+
CurBatch = C->createBatch(
629+
ClassId,
630+
reinterpret_cast<void *>(decompactPtr(ClassId, Array[I])));
648631
CurBatch->clear();
649632
Batches.push_front(CurBatch);
650633
UnusedSlots = BG->MaxCachedPerBatch;
@@ -792,7 +775,7 @@ template <typename Config> class SizeClassAllocator32 {
792775
}
793776

794777
const uptr Size = getSizeByClassId(ClassId);
795-
const u16 MaxCount = CacheT::getMaxCached(Size);
778+
const u16 MaxCount = TransferBatch::getMaxCached(Size);
796779
DCHECK_GT(MaxCount, 0U);
797780
// The maximum number of blocks we should carve in the region is dictated
798781
// by the maximum number of batches we want to fill, and the amount of

0 commit comments

Comments (0)