Stroika Library 3.0d16
 
MemoryAllocator.cpp
/*
 * Copyright(c) Sophist Solutions, Inc. 1990-2025. All rights reserved
 */
#include "Stroika/Foundation/StroikaPreComp.h"

#include <atomic>
#include <cstdlib>
#include <new>
#include <set>

#include "Stroika/Foundation/Debug/Assertions.h"
#include "Stroika/Foundation/Debug/Debugger.h"
#include "Stroika/Foundation/Execution/Common.h"
#include "Stroika/Foundation/Execution/Throw.h"

#include "MemoryAllocator.h"

using std::byte;

using namespace Stroika::Foundation;
using namespace Stroika::Foundation::Debug;
using namespace Stroika::Foundation::Memory;

namespace {
    SimpleAllocator_CallLIBCMallocFree sDefaultAllocator_;
}

// Since this code frequently gets used with 'DEBUG' turned off - and so no assert checking - we may
// sometimes want to 'force asserts on', at least for this module's checking.
//#define qSuperAssertChecks_MemAllocator 0
//#define qSuperAssertChecks_MemAllocator 1
#ifndef qSuperAssertChecks_MemAllocator
#define qSuperAssertChecks_MemAllocator qStroika_Foundation_Debug_AssertionsChecked
#endif

namespace {
#if qSuperAssertChecks_MemAllocator
    inline void SUPER_ASSERT_ (bool t)
    {
        if constexpr (qStroika_Foundation_Debug_AssertionsChecked) {
            Assert (t);
        }
        else {
            if (not t) {
                Debug::DropIntoDebuggerIfPresent ();
            }
        }
    }
#else
#define SUPER_ASSERT_(x)
#endif
}
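
// Note: when assertion checking is compiled out but qSuperAssertChecks_MemAllocator is forced on,
// SUPER_ASSERT_ drops into an attached debugger on failure (via Debug::DropIntoDebuggerIfPresent)
// rather than asserting.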

/*
 ********************************************************************************
 ********************* Memory::SimpleAllocator_CallLIBCMallocFree ***************
 ********************************************************************************
 */
void* SimpleAllocator_CallLIBCMallocFree::Allocate (size_t size)
{
    void* p = malloc (size);
    if (p == nullptr) [[unlikely]] {
        Execution::Throw (bad_alloc{});
    }
    return p;
}

void SimpleAllocator_CallLIBCMallocFree::Deallocate (void* p)
{
    RequireNotNull (p);
    free (p);
}
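
// Illustrative usage sketch (hypothetical caller code - not part of this file):
//
//      SimpleAllocator_CallLIBCMallocFree a;
//      void*                              p = a.Allocate (100); // throws bad_alloc if malloc fails
//      a.Deallocate (p);                                        // p must have come from a.Allocate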

/*
 ********************************************************************************
 ******************* Memory::SimpleAllocator_CallLIBCNewDelete ******************
 ********************************************************************************
 */
void* SimpleAllocator_CallLIBCNewDelete::Allocate (size_t size)
{
    return ::operator new (size); // ::operator new itself throws bad_alloc on failure, so no null check is needed
}

void SimpleAllocator_CallLIBCNewDelete::Deallocate (void* p)
{
    RequireNotNull (p);
    ::operator delete (p);
}

namespace {
    const unsigned int kPreGUARD   = 0x39;
    const unsigned int kPost_GUARD = 0x1f;
    // force alignment to the worst case we can be required to support, so memory allocated by our object is well aligned
    struct alignas (double) MemWithExtraStuff {
        unsigned int fPreGuard;
        size_t       fBlockSize;
    };
}
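
// Sketch of the guarded block layout built by SimpleSizeCountingGeneralPurposeAllocator::Allocate
// (below); derived from the offset arithmetic in Allocate/Deallocate:
//
//      +--------------------------------------+--------------+-------------------+
//      | MemWithExtraStuff                    | caller data  | post guard        |
//      | fPreGuard=kPreGUARD, fBlockSize=size | 'size' bytes | kPost_GUARD copy  |
//      +--------------------------------------+--------------+-------------------+
//      ^ block from fBaseAllocator_           ^ pointer returned to caller
//
// Deallocate undoes the offset arithmetic and uses SUPER_ASSERT_ to check both guards are intact.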

/*
 ********************************************************************************
 ************* Memory::SimpleSizeCountingGeneralPurposeAllocator ****************
 ********************************************************************************
 */
SimpleSizeCountingGeneralPurposeAllocator::SimpleSizeCountingGeneralPurposeAllocator ()
    : fBaseAllocator_{sDefaultAllocator_}
{
}

SimpleSizeCountingGeneralPurposeAllocator::SimpleSizeCountingGeneralPurposeAllocator (AbstractGeneralPurposeAllocator& baseAllocator)
    : fBaseAllocator_{baseAllocator}
{
}

SimpleSizeCountingGeneralPurposeAllocator::~SimpleSizeCountingGeneralPurposeAllocator ()
{
    //TraceContextBumper trcCtx{"SimpleSizeCountingGeneralPurposeAllocator::~SimpleSizeCountingGeneralPurposeAllocator"};
    //DbgTrace (L"fNetAllocationCount=%d, fNetAllocatedByteCount=%d", fNetAllocationCount, fNetAllocatedByteCount);
    // caller must free all entries before destroying allocator
    Require (fNetAllocationCount_ == 0);
    Require (fNetAllocatedByteCount_ == 0);
}

void* SimpleSizeCountingGeneralPurposeAllocator::Allocate (size_t size)
{
    size_t             effectiveSize = size + sizeof (MemWithExtraStuff) + sizeof (unsigned int);
    MemWithExtraStuff* p             = reinterpret_cast<MemWithExtraStuff*> (fBaseAllocator_.Allocate (effectiveSize));
    p->fPreGuard                     = kPreGUARD;
    p->fBlockSize                    = size;
    (void)::memcpy (reinterpret_cast<byte*> (p) + size + sizeof (MemWithExtraStuff), &kPost_GUARD, sizeof (kPost_GUARD));
    fNetAllocationCount_++;
    fNetAllocatedByteCount_ += static_cast<int32_t> (size);
    return (reinterpret_cast<byte*> (p) + sizeof (MemWithExtraStuff));
}

void SimpleSizeCountingGeneralPurposeAllocator::Deallocate (void* ptr)
{
    RequireNotNull (ptr);
    MemWithExtraStuff* p = reinterpret_cast<MemWithExtraStuff*> (reinterpret_cast<byte*> (ptr) - sizeof (MemWithExtraStuff));
    SUPER_ASSERT_ (p->fPreGuard == kPreGUARD);
    SUPER_ASSERT_ (::memcmp (reinterpret_cast<byte*> (p) + p->fBlockSize + sizeof (MemWithExtraStuff), &kPost_GUARD, sizeof (kPost_GUARD)) == 0);
    --fNetAllocationCount_;
    fNetAllocatedByteCount_ -= p->fBlockSize;
    fBaseAllocator_.Deallocate (p);
}

size_t SimpleSizeCountingGeneralPurposeAllocator::GetNetAllocationCount () const
{
    Require (fNetAllocationCount_ >= 0); // if this fails, it's (probably) bad use of this class - not a bug in the class
    return static_cast<size_t> (fNetAllocationCount_);
}

size_t SimpleSizeCountingGeneralPurposeAllocator::GetNetAllocatedByteCount () const
{
    Require (fNetAllocatedByteCount_ >= 0); // if this fails, it's (probably) bad use of this class - not a bug in the class
    return static_cast<size_t> (fNetAllocatedByteCount_);
}
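
// Illustrative usage sketch (hypothetical caller code - not part of this file):
//
//      SimpleSizeCountingGeneralPurposeAllocator counter; // wraps the malloc/free default allocator
//      void*                                     p = counter.Allocate (32);
//      Assert (counter.GetNetAllocatedByteCount () == 32);
//      counter.Deallocate (p); // every block must be returned before 'counter' is destroyed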

/*
 ********************************************************************************
 ********************* Memory::LeakTrackingGeneralPurposeAllocator **************
 ********************************************************************************
 */
namespace {
    using PTRMAP = LeakTrackingGeneralPurposeAllocator::PTRMAP;
    // collect the set of distinct live-block sizes, and the total bytes still allocated
    void ExtractInfo_ (const PTRMAP& m, set<size_t>* sizeSet, size_t* totalAllocated)
    {
        RequireNotNull (sizeSet);
        RequireNotNull (totalAllocated);
        sizeSet->clear ();
        *totalAllocated = 0;
        for (auto i = m.begin (); i != m.end (); ++i) {
            sizeSet->insert (i->second);
            (*totalAllocated) += i->second;
        }
    }
    // count how many live allocations have exactly the given size
    unsigned int ExtractCountUsedForSize_ (const PTRMAP& m, size_t eltSize)
    {
        unsigned int result = 0;
        for (auto i = m.begin (); i != m.end (); ++i) {
            if (i->second == eltSize) {
                result++;
            }
        }
        return result;
    }
}
LeakTrackingGeneralPurposeAllocator::LeakTrackingGeneralPurposeAllocator ()
    : fBaseAllocator_{sDefaultAllocator_}
{
}

LeakTrackingGeneralPurposeAllocator::LeakTrackingGeneralPurposeAllocator (AbstractGeneralPurposeAllocator& baseAllocator)
    : fBaseAllocator_{baseAllocator}
{
}
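
// The base-allocator constructor allows layering trackers (illustrative, hypothetical caller code):
//
//      LeakTrackingGeneralPurposeAllocator       tracker;          // wraps the malloc/free default
//      SimpleSizeCountingGeneralPurposeAllocator counted{tracker}; // counts, then forwards to 'tracker'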

LeakTrackingGeneralPurposeAllocator::~LeakTrackingGeneralPurposeAllocator ()
{
    // Avoid DbgTrace (etc) calls here - since when we do 'global op new' debugging, this gets deleted VERY late - after
    // most other modules (including DbgTrace support) are shut down.
    // But - Assert OK ;-)
    //      -LGP 2008-05-27
    SUPER_ASSERT_ (fAllocations_.size () == 0);
}

void* LeakTrackingGeneralPurposeAllocator::Allocate (size_t size)
{
    void* memptr = fBaseAllocator_.Allocate (size);
    AssertNotNull (memptr);
    [[maybe_unused]] lock_guard critSec{fCritSection_};
    try {
        fAllocations_.insert ({memptr, size});
        return memptr;
    }
    catch (...) {
        fBaseAllocator_.Deallocate (memptr);
        Execution::ReThrow ();
    }
}

void LeakTrackingGeneralPurposeAllocator::Deallocate (void* p)
{
    RequireNotNull (p);
    [[maybe_unused]] lock_guard critSec{fCritSection_};
    PTRMAP::iterator i = fAllocations_.find (p);
    SUPER_ASSERT_ (i != fAllocations_.end ());
    fAllocations_.erase (i);
    fBaseAllocator_.Deallocate (p);
}

size_t LeakTrackingGeneralPurposeAllocator::GetNetAllocationCount () const
{
    [[maybe_unused]] lock_guard critSec{fCritSection_};
    return fAllocations_.size ();
}

size_t LeakTrackingGeneralPurposeAllocator::GetNetAllocatedByteCount () const
{
    [[maybe_unused]] lock_guard critSec{fCritSection_};
    size_t total = 0;
    for (auto i = fAllocations_.begin (); i != fAllocations_.end (); ++i) {
        total += i->second;
    }
    return total;
}

LeakTrackingGeneralPurposeAllocator::Snapshot LeakTrackingGeneralPurposeAllocator::GetSnapshot () const
{
    [[maybe_unused]] lock_guard critSec{fCritSection_};
    return Snapshot{fAllocations_};
}
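
// Illustrative leak-hunting sketch (hypothetical caller code - not part of this file):
//
//      LeakTrackingGeneralPurposeAllocator           tracker;
//      LeakTrackingGeneralPurposeAllocator::Snapshot before = tracker.GetSnapshot ();
//      /* ...run the code suspected of leaking, allocating through 'tracker'... */
//      tracker.DUMPCurMemStats (before); // DbgTrace()s what appeared/disappeared since 'before'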

namespace {
    void DUMPCurMemStats_ (const LeakTrackingGeneralPurposeAllocator::Snapshot& curSnapshot, const LeakTrackingGeneralPurposeAllocator::Snapshot& sinceSnapshot)
    {
        set<size_t> sizes;
        set<size_t> prevSizes;
        size_t      totalRemainingAlloced     = 0;
        size_t      prevTotalRemainingAlloced = 0;
        ExtractInfo_ (curSnapshot.fAllocations, &sizes, &totalRemainingAlloced);
        ExtractInfo_ (sinceSnapshot.fAllocations, &prevSizes, &prevTotalRemainingAlloced);
        DbgTrace ("Net Allocation Count = {} (prev {})"_f, curSnapshot.fAllocations.size (), sinceSnapshot.fAllocations.size ());
        DbgTrace ("Net Allocation byte Count = {} (prev {})"_f, totalRemainingAlloced, prevTotalRemainingAlloced);
        if (totalRemainingAlloced > prevTotalRemainingAlloced) {
            DbgTrace ("Leaked {} bytes"_f, totalRemainingAlloced - prevTotalRemainingAlloced);
        }
        else if (prevTotalRemainingAlloced > totalRemainingAlloced) {
            DbgTrace ("Reverse-Leaked {} bytes"_f, prevTotalRemainingAlloced - totalRemainingAlloced);
        }
#if 0
        {
            //REALLY COULD USE STROIKA TALLY HERE
            for (auto si = sizes.begin (); si != sizes.end (); ++si) {
                unsigned int cnt = 0;
                for (auto i = fAllocMap.begin (); i != fAllocMap.end (); ++i) {
                    if (i->second == *si) {
                        ++cnt;
                    }
                }
                DbgTrace ("items of size {}, count {}"_f, *si, cnt);
            }
        }
#endif
        {
            // See how the current values differ from the previous run
            set<size_t>::const_iterator psi = prevSizes.begin ();
            for (auto si = sizes.begin (); si != sizes.end ();) {
                /*
                 * try to iterate the two lists at the same time - bumping whichever pointer is further behind
                 */
                if (psi == prevSizes.end ()) {
                    // then we have a 'leak' - new size buckets appeared
                    DbgTrace ("Leak: new size bucket {}"_f, *si);
                    ++si;
                }
                else {
                    if (*si < *psi) {
                        DbgTrace ("Leak: new size bucket {}"_f, *si);
                        ++si;
                    }
                    else if (*si == *psi) {
                        // here we just should check whether the same number of entries are present - and output THAT
                        unsigned int oldCountThisSize = ExtractCountUsedForSize_ (sinceSnapshot.fAllocations, *si);
                        unsigned int newCountThisSize = ExtractCountUsedForSize_ (curSnapshot.fAllocations, *si);
                        if (oldCountThisSize < newCountThisSize) {
                            DbgTrace ("Leak: for bucket size {}, oldCount={}, newCount={}"_f, *si, oldCountThisSize, newCountThisSize);
                        }
                        else if (oldCountThisSize > newCountThisSize) {
                            DbgTrace ("Reverse-Leak: for bucket size {}, oldCount={}, newCount={}"_f, *si, oldCountThisSize, newCountThisSize);
                        }
                        ++si;
                        ++psi;
                    }
                    else {
                        DbgTrace ("Reverse-Leak: old size bucket {}"_f, *psi);
                        ++psi;
                    }
                }
            }
            while (psi != prevSizes.end ()) {
                DbgTrace ("Reverse-Leak: old size bucket {}"_f, *psi);
                ++psi;
            }
        }
    }
}
void LeakTrackingGeneralPurposeAllocator::DUMPCurMemStats (const Snapshot& sinceSnapshot)
{
    Snapshot           curSnapshot = GetSnapshot ();
    TraceContextBumper ctx{"LeakTrackingGeneralPurposeAllocator::DUMPCurMemStats"};
    DUMPCurMemStats_ (curSnapshot, sinceSnapshot);
}

LeakTrackingGeneralPurposeAllocator::Snapshot::Snapshot (const PTRMAP& m)
    : fAllocations{m}
{
}