#ifndef LLVM_SUPPORT_ALLOCATOR_H
#define LLVM_SUPPORT_ALLOCATOR_H

#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/Alignment.h"
#include "llvm/Support/AllocatorBase.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/MathExtras.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <optional>
#include <utility>

namespace llvm {
/// Allocate memory in an ever growing pool, as if by bump-pointer.
///
/// This isn't strictly a bump-pointer allocator as it uses backing slabs of
/// memory rather than relying on a boundless contiguous heap. However, it has
/// bump-pointer semantics in that it is a monotonically growing pool of
/// memory where every allocation is found by merely allocating the next N
/// bytes in the slab, or the next N bytes in the next slab.
template <typename AllocatorT = MallocAllocator, size_t SlabSize = 4096,
          size_t SizeThreshold = SlabSize, size_t GrowthDelay = 128>
class BumpPtrAllocatorImpl
    : public AllocatorBase<BumpPtrAllocatorImpl<AllocatorT, SlabSize,
                                                SizeThreshold, GrowthDelay>>,
      private AllocatorT {
public:
  static_assert(SizeThreshold <= SlabSize,
                "The SizeThreshold must be at most the SlabSize to ensure "
                "that objects larger than a slab go into their own memory "
                "allocation.");
  static_assert(GrowthDelay > 0,
                "GrowthDelay must be at least 1, which already increases the "
                "slab size after each allocated slab.");
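
  // Illustrative sketch of the template parameters: a bump allocator with
  // 16 KiB slabs that routes any request padded past 4 KiB into its own
  // custom-sized slab, and doubles the slab size every 4 slabs:
  //
  //   llvm::BumpPtrAllocatorImpl<llvm::MallocAllocator, 16384, 4096, 4> BPA;
  //   void *P = BPA.Allocate(128, llvm::Align(16));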
  BumpPtrAllocatorImpl() = default;

  // The move constructor must be implemented manually: the old allocator's
  // slab lists have to be cleared so its destructor does not free memory that
  // now belongs to the new allocator.
  BumpPtrAllocatorImpl(BumpPtrAllocatorImpl &&Old)
      : AllocatorT(static_cast<AllocatorT &&>(Old)), CurPtr(Old.CurPtr),
        End(Old.End), Slabs(std::move(Old.Slabs)),
        CustomSizedSlabs(std::move(Old.CustomSizedSlabs)),
        BytesAllocated(Old.BytesAllocated), RedZoneSize(Old.RedZoneSize) {
    Old.CurPtr = Old.End = nullptr;
    Old.BytesAllocated = 0;
    Old.Slabs.clear();
    Old.CustomSizedSlabs.clear();
  }
  ~BumpPtrAllocatorImpl() {
    DeallocateSlabs(Slabs.begin(), Slabs.end());
    DeallocateCustomSizedSlabs();
  }
  BumpPtrAllocatorImpl &operator=(BumpPtrAllocatorImpl &&RHS) {
    DeallocateSlabs(Slabs.begin(), Slabs.end());
    DeallocateCustomSizedSlabs();

    CurPtr = RHS.CurPtr;
    End = RHS.End;
    BytesAllocated = RHS.BytesAllocated;
    RedZoneSize = RHS.RedZoneSize;
    Slabs = std::move(RHS.Slabs);
    CustomSizedSlabs = std::move(RHS.CustomSizedSlabs);
    AllocatorT::operator=(static_cast<AllocatorT &&>(RHS));

    RHS.CurPtr = RHS.End = nullptr;
    RHS.BytesAllocated = 0;
    RHS.Slabs.clear();
    RHS.CustomSizedSlabs.clear();
    return *this;
  }
  /// Deallocate all but the current slab and reset the current pointer to the
  /// beginning of it, freeing all memory allocated so far.
  void Reset() {
    // Deallocate all custom-sized slabs.
    DeallocateCustomSizedSlabs();
    CustomSizedSlabs.clear();

    if (Slabs.empty())
      return;

    // Reset the state.
    BytesAllocated = 0;
    CurPtr = (char *)Slabs.front();
    End = CurPtr + SlabSize;

    // Deallocate all but the first slab.
    DeallocateSlabs(std::next(Slabs.begin()), Slabs.end());
    Slabs.erase(std::next(Slabs.begin()), Slabs.end());
  }
  /// Allocate space at the specified alignment.
  void *Allocate(size_t Size, Align Alignment) {
    // Keep track of how many bytes we've allocated.
    BytesAllocated += Size;

    size_t Adjustment = offsetToAlignedAddr(CurPtr, Alignment);
    assert(Adjustment + Size >= Size && "Adjustment + Size must not overflow");

    size_t SizeToAllocate = Size;
#if LLVM_ADDRESS_SANITIZER_BUILD
    // Add trailing bytes as a "red zone" under ASan.
    SizeToAllocate += RedZoneSize;
#endif

    // Check if we have enough space in the current slab.
    if (Adjustment + SizeToAllocate <= size_t(End - CurPtr)
        // We can't return nullptr even for a zero-sized allocation!
        && CurPtr != nullptr) {
      char *AlignedPtr = CurPtr + Adjustment;
      CurPtr = AlignedPtr + SizeToAllocate;
      return AlignedPtr;
    }

    // If Size is really big, allocate a separate slab for it.
    size_t PaddedSize = SizeToAllocate + Alignment.value() - 1;
    if (PaddedSize > SizeThreshold) {
      void *NewSlab =
          AllocatorT::Allocate(PaddedSize, alignof(std::max_align_t));
      CustomSizedSlabs.push_back(std::make_pair(NewSlab, PaddedSize));

      uintptr_t AlignedAddr = alignAddr(NewSlab, Alignment);
      assert(AlignedAddr + Size <= (uintptr_t)NewSlab + PaddedSize);
      char *AlignedPtr = (char *)AlignedAddr;
      return AlignedPtr;
    }

    // Otherwise, start a new slab and try again.
    StartNewSlab();
    uintptr_t AlignedAddr = alignAddr(CurPtr, Alignment);
    assert(AlignedAddr + SizeToAllocate <= (uintptr_t)End &&
           "Unable to allocate memory!");
    char *AlignedPtr = (char *)AlignedAddr;
    CurPtr = AlignedPtr + SizeToAllocate;
    return AlignedPtr;
  }
  /// Allocate space at the specified (non-zero) integer alignment.
  void *Allocate(size_t Size, size_t Alignment) {
    assert(Alignment > 0 && "0-byte alignment is not allowed. Use 1 instead.");
    return Allocate(Size, Align(Alignment));
  }
  // Pull in the base class overloads, e.g. the typed Allocate<T>(Num).
  using AllocatorBase<BumpPtrAllocatorImpl>::Allocate;

  size_t GetNumSlabs() const { return Slabs.size() + CustomSizedSlabs.size(); }
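
  // Usage sketch (assuming a non-sanitized build and the default 4096-byte
  // parameters): small requests are bumped out of the current slab, while a
  // request whose padded size exceeds SizeThreshold gets a slab of its own.
  //
  //   BumpPtrAllocator BPA;
  //   void *Small = BPA.Allocate(64, Align(8));      // from a normal slab
  //   void *Large = BPA.Allocate(1 << 20, Align(8)); // custom-sized slab
  //   assert(BPA.GetNumSlabs() == 2);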
  /// \return An index uniquely and reproducibly identifying an input pointer
  /// \p Ptr in the given allocator. The returned value is negative iff the
  /// object is inside a custom-sized slab. Returns an empty optional if the
  /// pointer is not found in the allocator.
  std::optional<int64_t> identifyObject(const void *Ptr) {
    const char *P = static_cast<const char *>(Ptr);
    int64_t InSlabIdx = 0;
    for (size_t Idx = 0, E = Slabs.size(); Idx < E; Idx++) {
      const char *S = static_cast<const char *>(Slabs[Idx]);
      if (P >= S && P < S + computeSlabSize(Idx))
        return InSlabIdx + static_cast<int64_t>(P - S);
      InSlabIdx += static_cast<int64_t>(computeSlabSize(Idx));
    }

    // Use negative indexes to denote custom-sized slabs.
    int64_t InCustomSizedSlabIdx = -1;
    for (size_t Idx = 0, E = CustomSizedSlabs.size(); Idx < E; Idx++) {
      const char *S = static_cast<const char *>(CustomSizedSlabs[Idx].first);
      size_t Size = CustomSizedSlabs[Idx].second;
      if (P >= S && P < S + Size)
        return InCustomSizedSlabIdx - static_cast<int64_t>(P - S);
      InCustomSizedSlabIdx -= static_cast<int64_t>(Size);
    }
    return std::nullopt;
  }

  /// A wrapper around identifyObject that additionally asserts that the
  /// object is indeed within the allocator.
  int64_t identifyKnownObject(const void *Ptr) {
    std::optional<int64_t> Out = identifyObject(Ptr);
    assert(Out && "Wrong allocator used");
    return *Out;
  }
  /// A wrapper around identifyKnownObject that divides the index by the
  /// object's alignment to produce a smaller identifier. Note that
  /// sub-classes may have different alignment, so the most base class should
  /// be passed as the template parameter to obtain correct results; for that
  /// reason automatic template parameter deduction is disabled.
  template <typename T> int64_t identifyKnownAlignedObject(const void *Ptr) {
    int64_t Out = identifyKnownObject(Ptr);
    assert(Out % alignof(T) == 0 && "Wrong alignment information");
    return Out / alignof(T);
  }
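
  // Illustrative semantics of the identify* family (a sketch, assuming a
  // fresh allocator and a non-sanitized build): the first pointer handed out
  // identifies as 0, pointers into custom-sized slabs identify as negative,
  // and identifyKnownAlignedObject<T> divides the byte index by alignof(T)
  // to yield dense ids. E.g., for a hypothetical type Foo with sizeof(Foo)
  // == alignof(Foo) == 8, back-to-back allocations map to consecutive ids:
  //
  //   Foo *A = BPA.Allocate<Foo>(), *B = BPA.Allocate<Foo>();
  //   assert(BPA.identifyKnownAlignedObject<Foo>(B) ==
  //          BPA.identifyKnownAlignedObject<Foo>(A) + 1);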
  size_t getTotalMemory() const {
    size_t TotalMemory = 0;
    for (auto I = Slabs.begin(), E = Slabs.end(); I != E; ++I)
      TotalMemory += computeSlabSize(std::distance(Slabs.begin(), I));
    for (const auto &PtrAndSize : CustomSizedSlabs)
      TotalMemory += PtrAndSize.second;
    return TotalMemory;
  }

  size_t getBytesAllocated() const { return BytesAllocated; }
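
  // Note (illustrative): getTotalMemory() counts whole slabs, so it is an
  // upper bound on getBytesAllocated(); the difference is alignment padding
  // plus unused slab tails. A single 8-byte allocation reports 8 bytes
  // allocated but a full 4096-byte slab of total memory by default.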
  /// Set the size of the red zone placed between allocations when running
  /// under a sanitizer.
  void setRedZoneSize(size_t NewSize) { RedZoneSize = NewSize; }
private:
  /// The current pointer into the current slab; points to the next free byte.
  char *CurPtr = nullptr;

  /// The end of the current slab.
  char *End = nullptr;

  /// The slabs allocated so far.
  SmallVector<void *, 4> Slabs;

  /// Custom-sized slabs allocated for requests larger than SizeThreshold.
  SmallVector<std::pair<void *, size_t>, 0> CustomSizedSlabs;

  /// How many bytes we've allocated; used to compute wasted space.
  size_t BytesAllocated = 0;

  /// Bytes placed between allocations when running under a sanitizer.
  size_t RedZoneSize = 1;
  static size_t computeSlabSize(unsigned SlabIdx) {
    // Scale the actual allocated slab size based on the number of slabs
    // allocated. Every GrowthDelay slabs allocated, we double the allocated
    // size to reduce allocation frequency, but saturate at multiplying the
    // slab size by 2^30.
    return SlabSize *
           ((size_t)1 << std::min<size_t>(30, SlabIdx / GrowthDelay));
  }
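
  // With the default parameters (SlabSize = 4096, GrowthDelay = 128) this
  // yields:
  //   computeSlabSize(0)   == 4096    (slabs 0..127)
  //   computeSlabSize(128) == 8192    (slabs 128..255)
  //   computeSlabSize(256) == 16384   (slabs 256..383), and so on.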
  /// Allocate a new slab and move the bump pointers over into the new slab,
  /// modifying CurPtr and End.
  void StartNewSlab() {
    size_t AllocatedSlabSize = computeSlabSize(Slabs.size());

    void *NewSlab =
        AllocatorT::Allocate(AllocatedSlabSize, alignof(std::max_align_t));

    Slabs.push_back(NewSlab);
    CurPtr = (char *)(NewSlab);
    End = ((char *)NewSlab) + AllocatedSlabSize;
  }
  /// Deallocate a sequence of slabs.
  void DeallocateSlabs(SmallVectorImpl<void *>::iterator I,
                       SmallVectorImpl<void *>::iterator E) {
    for (; I != E; ++I) {
      size_t AllocatedSlabSize =
          computeSlabSize(std::distance(Slabs.begin(), I));
      AllocatorT::Deallocate(*I, AllocatedSlabSize, alignof(std::max_align_t));
    }
  }
  /// Deallocate all memory for custom-sized slabs.
  void DeallocateCustomSizedSlabs() {
    for (auto &PtrAndSize : CustomSizedSlabs) {
      void *Ptr = PtrAndSize.first;
      size_t Size = PtrAndSize.second;
      AllocatorT::Deallocate(Ptr, Size, alignof(std::max_align_t));
    }
  }

  template <typename T> friend class SpecificBumpPtrAllocator;
};

/// The standard BumpPtrAllocator which just uses the default template
/// parameters.
typedef BumpPtrAllocatorImpl<> BumpPtrAllocator;
/// A BumpPtrAllocator that allows only elements of a specific type to be
/// allocated, so that their destructors can be run in DestroyAll().
template <typename T> class SpecificBumpPtrAllocator {
  BumpPtrAllocator Allocator;

public:
  SpecificBumpPtrAllocator() {
    // Because SpecificBumpPtrAllocator walks the memory to call destructors,
    // it can't have red zones between allocations.
    Allocator.setRedZoneSize(0);
  }
  ~SpecificBumpPtrAllocator() { DestroyAll(); }

  /// Call the destructor of each allocated object and deallocate all but the
  /// current slab, resetting the current pointer to the beginning of it.
  void DestroyAll() {
    auto DestroyElements = [](char *Begin, char *End) {
      assert(Begin == (char *)alignAddr(Begin, Align::Of<T>()));
      for (char *Ptr = Begin; Ptr + sizeof(T) <= End; Ptr += sizeof(T))
        reinterpret_cast<T *>(Ptr)->~T();
    };

    for (auto I = Allocator.Slabs.begin(), E = Allocator.Slabs.end(); I != E;
         ++I) {
      size_t AllocatedSlabSize = BumpPtrAllocator::computeSlabSize(
          std::distance(Allocator.Slabs.begin(), I));
      char *Begin = (char *)alignAddr(*I, Align::Of<T>());
      // Only the active slab is partially used; dead slabs are full.
      char *End = *I == Allocator.Slabs.back()
                      ? Allocator.CurPtr
                      : (char *)*I + AllocatedSlabSize;

      DestroyElements(Begin, End);
    }

    for (auto &PtrAndSize : Allocator.CustomSizedSlabs) {
      void *Ptr = PtrAndSize.first;
      size_t Size = PtrAndSize.second;
      DestroyElements((char *)alignAddr(Ptr, Align::Of<T>()),
                      (char *)Ptr + Size);
    }

    Allocator.Reset();
  }

  /// Allocate space for an array of objects without constructing them.
  T *Allocate(size_t num = 1) { return Allocator.Allocate<T>(num); }
};
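
// Usage sketch (Widget is a hypothetical type): unlike a plain
// BumpPtrAllocator, the specific allocator runs ~Widget() on every object it
// handed out when it is destroyed or DestroyAll() is called:
//
//   SpecificBumpPtrAllocator<Widget> Pool;
//   Widget *W = new (Pool.Allocate()) Widget();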
} // end namespace llvm

/// Placement new for BumpPtrAllocatorImpl; the alignment is derived from the
/// allocation size, capped at alignof(std::max_align_t).
template <typename AllocatorT, size_t SlabSize, size_t SizeThreshold,
          size_t GrowthDelay>
void *operator new(size_t Size,
                   llvm::BumpPtrAllocatorImpl<AllocatorT, SlabSize,
                                              SizeThreshold, GrowthDelay>
                       &Allocator) {
  return Allocator.Allocate(Size, std::min((size_t)llvm::NextPowerOf2(Size),
                                           alignof(std::max_align_t)));
}
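
// Usage sketch: this operator new lets objects be constructed directly into
// the allocator with placement-new syntax. No destructor runs when the
// memory is later reclaimed en masse by Reset() or the destructor, so this
// is best suited to trivially-destructible types:
//
//   struct Point { int X, Y; };   // hypothetical trivially-destructible type
//   llvm::BumpPtrAllocator BPA;
//   Point *P = new (BPA) Point{1, 2};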
/// A matching operator delete so the compiler can call it if a constructor
/// throws inside the placement new above. It deliberately does nothing, as
/// bump allocators do not free individual allocations.
template <typename AllocatorT, size_t SlabSize, size_t SizeThreshold,
          size_t GrowthDelay>
void operator delete(void *,
                     llvm::BumpPtrAllocatorImpl<AllocatorT, SlabSize,
                                                SizeThreshold, GrowthDelay> &) {
}
#endif // LLVM_SUPPORT_ALLOCATOR_H