// NOTE(review): fragment of InlineBuffer<T, BUF_SIZE>'s capacity-(re)allocation
// routine (presumably reserve_ -- its signature precedes this chunk, and several
// original lines are elided from this view, so closing braces and some branch
// bodies are not shown). Comments below annotate only the visible code.
337 useNewCapacity = Foundation::Containers::Support::ReserveTweaks::GetScaledUpCapacity (useNewCapacity,
sizeof (T),
338 Math::AtLeast<size_t> (BUF_SIZE, 1));
// The scaled-up tweak may only grow the request, never shrink it.
343 Assert (useNewCapacity >= newCapacity);
// Only do work when the effective capacity actually changes (rare path).
344 if (useNewCapacity != oldCapacity) [[unlikely]] {
// 'In place' == fits in the fixed fInlinePreallocatedBuffer_; otherwise free-store.
345 bool oldInPlaceBuffer = oldCapacity <= BUF_SIZE;
346 bool newInPlaceBuffer = useNewCapacity <= BUF_SIZE;
// Trivially copyable T: storage can be moved with realloc / raw byte copies --
// no per-element constructor/destructor calls needed.
347 if constexpr (is_trivially_copyable_v<T>) {
// heap -> heap: let Reallocate_ move the bytes (may grow/shrink in place).
349 if (not oldInPlaceBuffer and not newInPlaceBuffer) {
351 fLiveData_ =
reinterpret_cast<T*
> (Reallocate_ (LiveDataAsAllocatedBytes_ (), SizeInBytes_ (useNewCapacity)));
352 fCapacityOfFreeStoreAllocation_ = useNewCapacity;
// inline -> inline: data already lives in the inline buffer (branch body elided in this view).
354 else if (oldInPlaceBuffer and newInPlaceBuffer) {
// inline -> heap: allocate a free-store block and bit-copy the live elements into it.
357 else if (oldInPlaceBuffer and not newInPlaceBuffer) {
359 byte* newPtr = Allocate_ (SizeInBytes_ (useNewCapacity));
361 Assert (this->begin () !=
reinterpret_cast<T*
> (newPtr));
362 Assert (
static_cast<size_t> (this->end () - this->begin ()) <= useNewCapacity);
363 uninitialized_copy (this->begin (), this->end (),
reinterpret_cast<T*
> (newPtr));
364 fLiveData_ =
reinterpret_cast<T*
> (newPtr);
// NOTE(review): always true on this branch (newInPlaceBuffer is false here) -- looks redundant; TODO confirm.
365 if (not newInPlaceBuffer) {
366 fCapacityOfFreeStoreAllocation_ = useNewCapacity;
// heap -> inline: copy the elements back into the inline buffer, then free the heap block.
369 else if (not oldInPlaceBuffer and newInPlaceBuffer) {
371 byte* newPtr = std::begin (fInlinePreallocatedBuffer_);
373 Assert (this->begin () !=
reinterpret_cast<T*
> (newPtr));
374 Assert (
static_cast<size_t> (this->end () - this->begin ()) <= useNewCapacity);
375 uninitialized_copy (this->begin (), this->end (),
reinterpret_cast<T*
> (newPtr));
// Free the old free-store block only AFTER the bytes have been copied out of it.
376 Deallocate_ (LiveDataAsAllocatedBytes_ ());
377 fLiveData_ =
reinterpret_cast<T*
> (newPtr);
// Non-trivially-copyable T: any storage change (inline<->heap, or heap resize)
// requires copy-constructing into fresh storage, destroying the old elements,
// and freeing any old heap block -- realloc is not usable here.
382 if (oldInPlaceBuffer != newInPlaceBuffer or (not newInPlaceBuffer)) {
383 byte* newPtr = newInPlaceBuffer ? std::begin (fInlinePreallocatedBuffer_) : Allocate_ (SizeInBytes_ (useNewCapacity));
386 Assert (this->begin () !=
reinterpret_cast<T*
> (newPtr));
387 Assert (
static_cast<size_t> (this->end () - this->begin ()) <= useNewCapacity);
388 uninitialized_copy (this->begin (), this->end (),
reinterpret_cast<T*
> (newPtr));
// Old elements must be destroyed before their storage is released or reused.
391 DestroyElts_ (this->begin (), this->end ());
394 if (not oldInPlaceBuffer) {
395 Assert (not UsingInlinePreallocatedBuffer_ ());
396 Deallocate_ (LiveDataAsAllocatedBytes_ ());
399 fLiveData_ =
reinterpret_cast<T*
> (newPtr);
400 if (not newInPlaceBuffer) {
401 fCapacityOfFreeStoreAllocation_ = useNewCapacity;
// Postcondition: capacity never reports below BUF_SIZE (the inline buffer always
// exists); above BUF_SIZE it equals the requested/tweaked capacity exactly.
406 Ensure ((useNewCapacity <= BUF_SIZE and capacity () == BUF_SIZE) or (useNewCapacity > BUF_SIZE and useNewCapacity == capacity ()));
// NOTE(review): two method fragments follow; each declaration line (between the
// template header and the visible statement) is elided from this view, so which
// members these are cannot be told from here -- TODO confirm against the full file.
409 template <
typename T,
size_t BUF_SIZE>
// Visible body line: re-checks the container invariant 'size never exceeds capacity'.
412 Ensure (fSize_ <= capacity ());
415 template <
typename T,
size_t BUF_SIZE>
// Same invariant check as the fragment above.
418 Ensure (fSize_ <= capacity ());
// NOTE(review): run of truncated member definitions -- for each, the signature
// line (and in two cases the whole body) is elided from this view; only the
// template headers and two bounds-checked element accesses are visible.
421 template <
typename T,
size_t BUF_SIZE>
426 template <
typename T,
size_t BUF_SIZE>
// Bounds-checked element access (presumably operator[] -- signature elided; TODO confirm).
429 Require (i < fSize_);
430 return *(fLiveData_ + i);
432 template <
typename T,
size_t BUF_SIZE>
// Matching overload of the accessor above (presumably the const counterpart -- TODO confirm).
435 Require (i < fSize_);
436 return *(fLiveData_ + i);
438 template <
typename T,
size_t BUF_SIZE>
443 template <
typename T,
size_t BUF_SIZE>
448 template <
typename T,
size_t BUF_SIZE>
449 template <ISpanOfT<T> SPAN_T>
// Insert copyFrom's elements starting at index 'at', growing capacity first if needed.
// NOTE(review): the opening brace, the declaration of local 's' (evidently the
// current size), and the capacity-growing call inside the 'if' body are elided
// from this view -- TODO confirm against the full file.
450 inline void InlineBuffer<T, BUF_SIZE>::Insert (
size_t at,
const SPAN_T& copyFrom)
453 size_t n2Add = copyFrom.size ();
454 size_t newS = s + n2Add;
// Growth is the rare path; the (elided) body must establish HasEnoughCapacity_ (newS).
455 if (not this->HasEnoughCapacity_ (newS)) [[unlikely]] {
458 Assert (this->HasEnoughCapacity_ (newS));
// Memory::Insert performs the element shifting and copying within the capacity
// span; the size of the span it returns becomes the new fSize_.
459 this->fSize_ = Memory::Insert (span{this->begin (), size ()}, span{this->begin (), capacity ()}, at, copyFrom).size ();
460 Assert (this->fSize_ == newS);
462 template <
typename T,
size_t BUF_SIZE>
463 inline void InlineBuffer<T, BUF_SIZE>::Insert (
size_t at,
const T& item)
465 Insert (at, span{&item, 1});
467 template <
typename T,
size_t BUF_SIZE>
// Iterator/pointer-range insert: converts iterator 'i' to an index and the
// [from, to) pointer pair to a span, then delegates to the index+span Insert
// overload. NOTE(review): the signature line is elided from this view --
// TODO confirm parameter names/types against the full file.
470 Insert (i - begin (), span{from, to});
472 template <
typename T,
size_t BUF_SIZE>
// Append-one-element fragment (presumably push_back -- signature elided; TODO confirm).
// NOTE(review): several lines are elided from this view: the computation of s/newS,
// the capacity-growing body of the 'if', and the trivially-copyable branch body.
477 if (not this->HasEnoughCapacity_ (newS)) [[unlikely]] {
// Trivially copyable T takes a raw-copy path (its body is elided in this view) ...
480 if constexpr (is_trivially_copyable_v<T>) {
// ... otherwise copy-construct the new element in place at the current end.
484 uninitialized_copy (&e, &e + 1, this->begin () + s);
489 template <
typename T,
size_t BUF_SIZE>
490 template <ISpanOfT<T> SPAN_T>
// Append a span of T at the end (the signature line is elided from this view --
// TODO confirm member name; likely a push_back/Append span overload).
494 size_t newS = s + copyFrom.size ();
// Grow capacity first if needed (the growth call inside the 'if' is elided in this view).
495 if (not this->HasEnoughCapacity_ (newS)) [[unlikely]] {
498 Assert (this->HasEnoughCapacity_ (newS));
// Trivially copyable: bulk copy into the tail ...
499 if constexpr (is_trivially_copyable_v<T>) {
500 CopySpanData (copyFrom, span{this->begin () + s, copyFrom.size ()});
// ... otherwise per-element copy-construct into the uninitialized tail.
503 uninitialized_copy (copyFrom.begin (), copyFrom.end (), this->begin () + s);
507 template <
typename T,
size_t BUF_SIZE>
// Append from a span whose element type need not be T (constraint here is ISpan,
// not ISpanOfT<T> as in the overload above). The signature line is elided from
// this view -- TODO confirm against the full file.
508 template <ISpan SPAN_T>
512 size_t newS = s + copyFrom.size ();
// Grow capacity first if needed (the growth call inside the 'if' is elided in this view).
513 if (not this->HasEnoughCapacity_ (newS)) [[unlikely]] {
516 Assert (this->HasEnoughCapacity_ (newS));
517 auto outPtr = this->begin () + s;
// Trivially copyable: bulk copy into the tail; otherwise per-element copy-construct.
518 if constexpr (is_trivially_copyable_v<T>) {
519 CopySpanData (copyFrom, span{outPtr, copyFrom.size ()});
522 uninitialized_copy (copyFrom.begin (), copyFrom.end (), outPtr);
526 template <
typename T,
size_t BUF_SIZE>