#include "Stroika/Foundation/Execution/Common.h"
#include "Stroika/Foundation/Math/Common.h"
#include "Stroika/Foundation/Memory/Common.h"

// Configuration knob: use the lock-free (atomic head + sentinel) implementation of the block pools
#if !defined(qStroika_Foundation_Memory_BlockAllocator_UseLockFree_)
#define qStroika_Foundation_Memory_BlockAllocator_UseLockFree_ 1
#endif

// Configuration knob: grab chunks with ::malloc/::free instead of operator new/delete
#if !defined(qStroika_Foundation_Memory_BlockAllocator_UseMallocDirectly_)
#define qStroika_Foundation_Memory_BlockAllocator_UseMallocDirectly_ 1
#endif

#if qStroika_Foundation_Memory_BlockAllocator_UseMallocDirectly_
#include "Stroika/Foundation/Execution/Throw.h"
#endif
namespace Stroika::Foundation::Memory {

    namespace Private_ {
#if qStroika_Foundation_Memory_BlockAllocator_UseLockFree_
        // any value that can never be a real block address; swapped into the head as a pseudo-lock
        static void* const kLockedSentinel_ = (void*)1;
#else
        constexpr bool kUseSpinLock_ = Execution::kSpinLock_IsFasterThan_mutex;

        using LockType_ = conditional_t<kUseSpinLock_, Execution::SpinLock, mutex>;

        inline LockType_& GetLock_ ()
        {
            static LockType_ sLock_;
            return sLock_;
        }

        void DoDeleteHandlingLocksExceptionsEtc_ (void* p, void** staticNextLinkP) noexcept;
#endif
 
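        /*
         *  Implementation notes (summary of the code below):
         *
         *  Each BlockAllocationPool_<SIZE> keeps one free list of fixed-size blocks; the first
         *  sizeof (void*) bytes of every free block hold the pointer to the next free block (hence
         *  the SIZE >= sizeof (void*) requirement). With qStroika_Foundation_Memory_BlockAllocator_UseLockFree_
         *  the head is an atomic<void*> and kLockedSentinel_ is swapped in as a short-lived
         *  pseudo-lock while the head is updated; otherwise a single SpinLock/mutex (GetLock_)
         *  guards the list.
         */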
        template <size_t SIZE>
        class BlockAllocationPool_ {
        public:
            static void* Allocate (size_t n);
            static void  Deallocate (void* p) noexcept;
            static void  Compact ();

        private:
            // head of the free list; atomic in the lock-free configuration, a plain pointer otherwise
            alignas (void*) static inline conditional_t<qStroika_Foundation_Memory_BlockAllocator_UseLockFree_, atomic<void*>, void*> sHeadLink_{nullptr};

            static constexpr size_t ComputeChunks_ ();
            static void**           GetMem_Util_ ();
        };
    }
 
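    /*
     *  Allocate: pop one block from the SIZE-specific free list, refilling the list from the heap
     *  (GetMem_Util_) when it is empty. In the lock-free configuration the head pointer itself
     *  serves as the lock (see kLockedSentinel_); otherwise a lock_guard protects the pop.
     */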
    template <size_t SIZE>
    inline void* Private_::BlockAllocationPool_<SIZE>::Allocate ([[maybe_unused]] size_t n)
    {
        static_assert (SIZE >= sizeof (void*), "SIZE >= sizeof (void*)");

#if qStroika_Foundation_Memory_BlockAllocator_UseLockFree_
        constexpr bool kTryMemoryOrderOptimizations_ = true;

        // grab the pseudo-lock: swap the sentinel into the head, keeping the previous head in p
        void* p{};
        for (;;) {
            p = sHeadLink_.exchange (Private_::kLockedSentinel_, memory_order_acq_rel);
            if (p == Private_::kLockedSentinel_) [[unlikely]] {
                this_thread::yield (); // another thread holds the pseudo-lock - back off and retry
            }
            else {
                break;
            }
        }
        // empty free list: refill it with a freshly allocated, pre-linked chunk
        if (p == nullptr) [[unlikely]] {
            p = GetMem_Util_ ();
        }
        static_assert (sizeof (void*) == sizeof (atomic<void*>), "atomic doesn't change size");
        // the first word of each free block links to the next free block
        void* next = reinterpret_cast<const atomic<void*>*> (p)->load (memory_order_acquire);
        // publish the new head, releasing the pseudo-lock
        if constexpr (kTryMemoryOrderOptimizations_) {
            Verify (sHeadLink_.exchange (next, memory_order_release) == Private_::kLockedSentinel_);
        }
        else {
            Verify (sHeadLink_.exchange (next, memory_order_acq_rel) == Private_::kLockedSentinel_);
        }
        return p;
#else
        [[maybe_unused]] lock_guard critSec{Private_::GetLock_ ()};
        if (sHeadLink_ == nullptr) [[unlikely]] {
            sHeadLink_ = GetMem_Util_ ();
        }
        void* result = sHeadLink_;
        // pop the head block; the head advances to the next free block
        sHeadLink_ = (*(void**)sHeadLink_);
        return result;
#endif
    }
 
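    /*
     *  Deallocate: push the block back onto the free list - store the previous head into the block's
     *  first word, then publish the block as the new head. The non-lock-free path defers to the
     *  out-of-line helper DoDeleteHandlingLocksExceptionsEtc_.
     */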
    template <size_t SIZE>
    inline void Private_::BlockAllocationPool_<SIZE>::Deallocate (void* p) noexcept
    {
        static_assert (SIZE >= sizeof (void*), "SIZE >= sizeof (void*)");

#if qStroika_Foundation_Memory_BlockAllocator_UseLockFree_
        constexpr bool kTryMemoryOrderOptimizations_ = true;

        // grab the pseudo-lock, keeping the previous head so it can be chained behind the returned block
        void* prevHead{};
        for (;;) {
            prevHead = sHeadLink_.exchange (Private_::kLockedSentinel_, memory_order_acq_rel);
            if (prevHead == Private_::kLockedSentinel_) [[unlikely]] {
                this_thread::yield (); // another thread holds the pseudo-lock - back off and retry
            }
            else {
                break;
            }
        }
        // push: store the old head into the block's first word, then publish the block as the new head
        void* newHead = p;
        static_assert (sizeof (void*) == sizeof (atomic<void*>), "atomic doesn't change size");
        reinterpret_cast<atomic<void*>*> (newHead)->store (prevHead, memory_order_release);
        if constexpr (kTryMemoryOrderOptimizations_) {
            Verify (sHeadLink_.exchange (newHead, memory_order_release) == Private_::kLockedSentinel_);
        }
        else {
            Verify (sHeadLink_.exchange (newHead, memory_order_acq_rel) == Private_::kLockedSentinel_);
        }
#else
        Private_::DoDeleteHandlingLocksExceptionsEtc_ (p, &sHeadLink_);
#endif
    }
 
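    /*
     *  Compact: gather every free-list entry into a sorted vector, return any fully unused heap chunk
     *  (a run of ComputeChunks_ () blocks, each SIZE apart) to the heap, then rebuild the free list
     *  from the surviving entries. Only the locked (non-lock-free) configuration does any work here.
     */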
    template <size_t SIZE>
    void Private_::BlockAllocationPool_<SIZE>::Compact ()
    {
#if qStroika_Foundation_Memory_BlockAllocator_UseLockFree_
        // not implemented for the lock-free configuration
#else
        [[maybe_unused]] lock_guard critSec{Private_::GetLock_ ()};

        // step one: walk the free list and collect every link into a vector
        const size_t kChunks = ComputeChunks_ ();
        Assert (kChunks >= 1);
        vector<void*> links;
        try {
            links.reserve (kChunks * 2);
            void* link = sHeadLink_;
            while (link != nullptr) {
                if (links.size () == links.capacity ()) {
                    links.reserve (links.size () * 2);
                }
                links.push_back (link);
                link = *(void**)link;
            }
        }
        catch (...) {
            return; // cannot get memory for the bookkeeping vector - just skip compacting
        }

        sort (links.begin (), links.end ());

        // step two: any run of kChunks adjacent free blocks (each exactly SIZE apart) is an entirely
        // unused heap chunk, so it can be removed from the list and returned to the heap
        size_t originalSize = links.size ();
        size_t index        = 0;
        while (index < links.size ()) {
            byte*  deleteCandidate = (byte*)links[index];
            bool   canDelete       = true;
            size_t i               = 1;
            for (; i < kChunks; ++i) {
                if ((index + i) >= links.size ())
                    break;
                byte* next = (byte*)links[index + i];
                byte* prev = (byte*)links[index + i - 1];
                if (canDelete and ((next - prev) != SIZE)) {
                    canDelete = false;
                }
                if (static_cast<size_t> (abs (next - deleteCandidate) / SIZE) >= kChunks) {
                    Assert (not canDelete);
                    break;
                }
            }
            if (canDelete and i == kChunks) {
                links.erase (links.begin () + index, links.begin () + index + kChunks);
                if constexpr (qStroika_Foundation_Memory_BlockAllocator_UseMallocDirectly_) {
                    ::free ((void*)deleteCandidate);
                }
                else {
                    delete[] static_cast<char*> ((void*)deleteCandidate);
                }
            }
            else {
                index += i;
            }
        }

        // step three: rebuild the free list from the remaining links
        sHeadLink_ = (links.size () == 0) ? nullptr : links[0];
        if (links.size () != originalSize and links.size () > 0) {
            void** curLink = (void**)sHeadLink_;
            for (size_t i = 1; i < links.size (); ++i) {
                *curLink = links[i];
                curLink  = (void**)*curLink;
            }
            *curLink = nullptr;
        }
#endif
    }
 
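    /*
     *  ComputeChunks_: how many SIZE-byte blocks to carve out of each underlying heap request -
     *  roughly 16K worth (leaving slop for allocator overhead), but never fewer than 10.
     */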
    template <size_t SIZE>
    constexpr size_t Private_::BlockAllocationPool_<SIZE>::ComputeChunks_ ()
    {
        constexpr size_t kTargetMallocSize_ = 16360;
        return Math::AtLeast (static_cast<size_t> (kTargetMallocSize_ / SIZE), static_cast<size_t> (10));
    }
 
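    /*
     *  GetMem_Util_: allocate one chunk of ComputeChunks_ () * SIZE bytes and pre-link its blocks
     *  into a singly linked free list (each block's first word points at the next), returning the
     *  head of that list.
     */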
    template <size_t SIZE>
    inline void** Private_::BlockAllocationPool_<SIZE>::GetMem_Util_ ()
    {
        Require (SIZE >= sizeof (void*));

        constexpr size_t kChunks = ComputeChunks_ ();
        Assert (kChunks >= 1);

        // allocate one chunk of kChunks * SIZE bytes
        void** newLinks;
        if constexpr (qStroika_Foundation_Memory_BlockAllocator_UseMallocDirectly_) {
            DISABLE_COMPILER_GCC_WARNING_START ("GCC diagnostic ignored \"-Wmaybe-uninitialized\"")
            newLinks = (void**)::malloc (kChunks * SIZE);
            Execution::ThrowIfNull (newLinks);
            DISABLE_COMPILER_GCC_WARNING_END ("GCC diagnostic ignored \"-Wmaybe-uninitialized\"")
        }
        else {
            newLinks = (void**)new char[kChunks * SIZE];
        }

        // thread the blocks of the chunk together into a singly linked free list
        {
            struct linkedListElt {
                linkedListElt* fNext;
            };
            auto nextLinkAlias = reinterpret_cast<linkedListElt*> (newLinks);
            for (size_t i = 1; i < kChunks; ++i) {
                auto nextNextLink    = reinterpret_cast<linkedListElt*> (reinterpret_cast<byte*> (nextLinkAlias) + SIZE);
                nextLinkAlias->fNext = nextNextLink;
                nextLinkAlias        = nextNextLink;
            }
            nextLinkAlias->fNext = nullptr; // terminate the list
        }
        return newLinks;
    }
 
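    /*
     *  BlockAllocator<T>: std-style allocator facade over BlockAllocationPool_. allocate/deallocate
     *  hand out and take back raw, uninitialized storage from the pool selected by
     *  AdjustSizeForPool_ (), so callers construct and destroy the objects themselves.
     *
     *  Illustrative sketch (not part of this file; 'Foo' stands in for any small class type):
     *
     *      Memory::BlockAllocator<Foo> alloc;
     *      Foo* p = alloc.allocate (1);    // raw storage from the per-size pool
     *      new (p) Foo{};                  // construct in place
     *      p->~Foo ();
     *      alloc.deallocate (p, 1);        // storage goes back onto the pool's free list
     */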
    template <typename T>
    template <typename U>
    constexpr BlockAllocator<T>::BlockAllocator (const BlockAllocator<U>&) noexcept
    {
    }
    template <typename T>
    inline T* BlockAllocator<T>::allocate ([[maybe_unused]] size_t n)
    {
        using Private_::BlockAllocationPool_;
#if qStroika_Foundation_Memory_PreferBlockAllocation
        T* result = reinterpret_cast<T*> (BlockAllocationPool_<AdjustSizeForPool_ ()>::Allocate (sizeof (T)));
#else
        T* result = reinterpret_cast<T*> (::operator new (sizeof (T)));
#endif
        Ensure (reinterpret_cast<ptrdiff_t> (result) % alignof (T) == 0); // verify the returned storage is suitably aligned for T
        return result;
    }
 
 
    template <typename T>
    inline void BlockAllocator<T>::deallocate (T* p, [[maybe_unused]] size_t n) noexcept
    {
        using Private_::BlockAllocationPool_;
#if qStroika_Foundation_Memory_PreferBlockAllocation
        if (p != nullptr) [[likely]] {
            BlockAllocationPool_<AdjustSizeForPool_ ()>::Deallocate (p);
        }
#else
        ::operator delete (p);
#endif
    }
 
 
    template <typename T>
    inline void BlockAllocator<T>::Compact ()
    {
        using Private_::BlockAllocationPool_;
#if qStroika_Foundation_Memory_PreferBlockAllocation
        BlockAllocationPool_<AdjustSizeForPool_ ()>::Compact ();
#endif
    }
 
 
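    /*
     *  AdjustSizeForPool_: round sizeof (T) up to a multiple of sizeof (void*) so every block can hold
     *  a free-list link; types whose rounded sizes match share the same pool instantiation.
     */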
    template <typename T>
    constexpr size_t BlockAllocator<T>::AdjustSizeForPool_ ()
    {
        return Math::RoundUpTo (sizeof (T), sizeof (void*));
    }
 
    template <typename T>
    inline bool BlockAllocator<T>::operator== (const BlockAllocator& rhs) const
    {
        return true; // BlockAllocator is stateless, so all instances compare equal
    }

}