This is an automated email from the ASF dual-hosted git repository.

bneradt pushed a commit to branch memarena-static-block
in repository https://gitbox.apache.org/repos/asf/trafficserver-libswoc.git

commit 58f8a15a2b6b7978655cab2b123ea2a0f4f90609
Author: Alan M. Carroll <[email protected]>
AuthorDate: Tue Mar 9 16:43:32 2021 -0600

    Add static block support to MemArena.
---
 code/include/swoc/MemArena.h | 13 +++++++++++++
 code/include/swoc/Scalar.h   |  8 ++++----
 code/src/MemArena.cc         | 34 +++++++++++++++++++++++++++++-----
 doc/code/MemArena.en.rst     | 38 +++++++++++++++++++++++++++++++++++---
 unit_tests/test_MemArena.cc  | 31 +++++++++++++++++++++++++++++++
 5 files changed, 112 insertions(+), 12 deletions(-)

diff --git a/code/include/swoc/MemArena.h b/code/include/swoc/MemArena.h
index 8ac8351..d63f2d5 100644
--- a/code/include/swoc/MemArena.h
+++ b/code/include/swoc/MemArena.h
@@ -167,6 +167,16 @@ public:
    */
   explicit MemArena(size_t n = DEFAULT_BLOCK_SIZE);
 
+  /** Construct using static block.
+   *
+   * @param static_block A block of memory that is non-deletable.
+   *
+   * @a static_block is used as the first block for allocation and is never 
deleted. This makes
+   * it possible to have an instance that allocates from stack memory and only 
allocates from the
+   * heap if the static block becomes full.
+   */
+  explicit MemArena(MemSpan<void> static_block);
+
   /// no copying
   MemArena(self_type const& that) = delete;
 
@@ -379,6 +389,9 @@ protected:
   BlockList _frozen; ///< Previous generation, frozen memory.
   BlockList _active; ///< Current generation. Allocate here.
 
+  /// Static block, if any.
+  Block* _static_block = nullptr;
+
   // Note on _active block list - blocks that become full are moved to the end 
of the list.
   // This means that when searching for a block with space, the first full 
block encountered
   // marks the last block to check. This keeps the set of blocks to check 
short.
diff --git a/code/include/swoc/Scalar.h b/code/include/swoc/Scalar.h
index 276d013..25df940 100644
--- a/code/include/swoc/Scalar.h
+++ b/code/include/swoc/Scalar.h
@@ -293,10 +293,10 @@ public:
   constexpr Counter count() const;
 
   /// The scaled value.
-  constexpr intmax_t value() const;
+  constexpr Counter value() const;
 
   /// User conversion to scaled value.
-  constexpr operator intmax_t() const;
+  constexpr operator Counter() const;
 
   /// Addition operator.
   /// The value is scaled from @a that to @a this.
@@ -416,12 +416,12 @@ Scalar<N, C, T>::count() const -> Counter {
 }
 
 template<intmax_t N, typename C, typename T>
-constexpr intmax_t
+constexpr C
 Scalar<N, C, T>::value() const {
   return _n * SCALE;
 }
 
-template<intmax_t N, typename C, typename T> constexpr Scalar<N, C, 
T>::operator intmax_t() const {
+template<intmax_t N, typename C, typename T> constexpr Scalar<N, C, 
T>::operator C() const {
   return _n * SCALE;
 }
 
diff --git a/code/src/MemArena.cc b/code/src/MemArena.cc
index a46b1a8..8db4996 100644
--- a/code/src/MemArena.cc
+++ b/code/src/MemArena.cc
@@ -15,6 +15,19 @@ inline bool MemArena::Block::satisfies(size_t n, size_t 
align) const {
   return r >= (n + align_padding(this->data() + allocated, align));
 }
 
+MemArena::MemArena(MemSpan<void> static_block) {
+  static constexpr Scalar<16, size_t> MIN_BLOCK_SIZE = round_up(sizeof(Block) 
+ Block::MIN_FREE_SPACE);
+  if (static_block.size() < MIN_BLOCK_SIZE) {
+    throw std::domain_error("MemArena static block is too small.");
+  }
+  // Construct the block data in the block and put it on the list. Make a note 
this is the
+  // static block that shouldn't be deleted.
+  auto space = static_block.size() - sizeof(Block);
+  _static_block = new (static_block.data()) Block(space);
+  _active_reserved = space;
+  _active.prepend(_static_block);
+}
+
 // Need to break these out because the default implementation doesn't clear the
 // integral values in @a that.
 
@@ -22,10 +35,13 @@ MemArena::MemArena(swoc::MemArena::self_type&& that)
     : _active_allocated(that._active_allocated), 
_active_reserved(that._active_reserved)
       , _frozen_allocated(that._frozen_allocated), 
_frozen_reserved(that._frozen_reserved)
       , _reserve_hint(that._reserve_hint), _frozen(std::move(that._frozen))
-      , _active(std::move(that._active)) {
+      , _active(std::move(that._active))
+      , _static_block(that._static_block) {
+  // Clear data in @a that to indicate all of the memory has been moved.
   that._active_allocated = that._active_reserved = 0;
   that._frozen_allocated = that._frozen_reserved = 0;
   that._reserve_hint = 0;
+  that._static_block = nullptr;
 }
 
 MemArena *
@@ -110,6 +126,11 @@ MemArena&
 MemArena::thaw() {
   this->destroy_frozen();
   _frozen_reserved = _frozen_allocated = 0;
+  if (_static_block) {
+    _static_block->discard();
+    _active.prepend(_static_block);
+    _active_reserved += _static_block->remaining();
+  }
   return *this;
 }
 
@@ -152,12 +173,12 @@ MemArena::require(size_t n, size_t align) {
 
 void
 MemArena::destroy_active() {
-  _active.apply([](Block *b) { delete b; }).clear();
+  _active.apply([=](Block *b) { if (b != _static_block) delete b; }).clear();
 }
 
 void
 MemArena::destroy_frozen() {
-  _frozen.apply([](Block *b) { delete b; }).clear();
+  _frozen.apply([=](Block *b) { if (b != _static_block) delete b; }).clear();
 }
 
 MemArena&
@@ -183,19 +204,22 @@ MemArena::discard(size_t hint) {
 
 MemArena::~MemArena() {
   // Destruct in a way that makes it safe for the instance to be in one of its 
own memory blocks.
+  // This means copying members that will be used during the delete.
   Block *ba = _active.head();
   Block *bf = _frozen.head();
+  Block *sb = _static_block;
+
   _active.clear();
   _frozen.clear();
   while (bf) {
     Block *b = bf;
     bf = bf->_link._next;
-    delete b;
+    if (b != sb) delete b;
   }
   while (ba) {
     Block *b = ba;
     ba = ba->_link._next;
-    delete b;
+    if (b != sb) delete b;
   }
 }
 
diff --git a/doc/code/MemArena.en.rst b/doc/code/MemArena.en.rst
index bd033aa..1194090 100644
--- a/doc/code/MemArena.en.rst
+++ b/doc/code/MemArena.en.rst
@@ -41,11 +41,12 @@ This is useful when the goal is any (or all) of
 *  de-allocating memory in bulk.
 
 Note that intrusive containers such as :ref:`IntrusiveDList 
<swoc-intrusive-list>` and
-:ref:`IntrusiveHashMap <swoc-intrusive-hashmap>` work well with a |MemArena|. 
If the container and its elements are placed
-in the |MemArena| then no specific cleanup is needed beyond destroying the 
|MemArena|.
+:ref:`IntrusiveHashMap <swoc-intrusive-hashmap>` work well with a |MemArena|. 
If the container and
+its elements are placed in the |MemArena| then no specific cleanup is needed 
beyond destroying the
+|MemArena|.
 
 A |MemArena| can also be `inverted <arena-inversion>`_. This means placing the 
|MemArena| instance
-in its own memory pool so that the |MemArena| and associated objects can be 
created with a single
+in its own memory pool so the |MemArena| and associated objects can be created 
with a single
 base library memory allocation and cleaned up with a single :code:`delete`.
 
 Usage
@@ -138,6 +139,37 @@ memory space is reused. It is also guaranteed that if the 
next call to :libswoc:
 remnant. This makes it possible to do speculative work in the arena and 
"commit" it (via allocation)
 after the work is successful, or abandon it if not.
 
+Static Memory
+=============
+
+|MemArena| has :libswoc:`a constructor <MemArena::MemArena(MemSpan\<void\>)>` 
that enables using a
+pre-defined block of memory. This memory is never released or destructed by 
the |MemArena|. This is
+useful in local contexts where an arbitrary amount of memory *may* be needed 
but commonly less than a
+known amount of memory will actually be needed. In such a case the |MemArena| 
can be seeded with a
+block of memory (static or on the stack) of that size so that no allocation is 
done in the common
+case, only in the few cases where it is needed without special handling. Such 
an increase in
+performance is essentially the only reason to use this feature.
+
+The static block must be large enough to hold the internal block headers and 
still have at least
+the minimum free space available. As of this writing this requires a buffer of 
at least 64 bytes.
+
+If (roughly) two kilobytes would normally be enough, that could be done with ::
+
+  static constexpr size_t SIZE = 2048;
+  std::byte buffer[SIZE];
+  MemArena arena{{buffer, SIZE}};
+
+All methods are available and functional in this case, including freezing and 
thawing. Any
+allocations while frozen will never be in the static block, as it won't be 
recycled until the thaw.
+Generally this is not a good technique as a situation where freeze / thaw is 
useful is almost
+certainly not a situation where a static block is useful and vice versa.
+
+Although such an arena could be inverted (and thereby placed in the static 
block) this is also very
+unlikely to be useful, as the stack space for the arena would still be 
required during construction.
+
+The expectation is such an instance would be scoped locally to a function or 
method and destroyed
+upon return, being used only for temporary storage.
+
 Examples
 ========
 
diff --git a/unit_tests/test_MemArena.cc b/unit_tests/test_MemArena.cc
index c2df081..f82549b 100644
--- a/unit_tests/test_MemArena.cc
+++ b/unit_tests/test_MemArena.cc
@@ -512,4 +512,35 @@ TEST_CASE("PMR 3", "[libswoc][arena][pmr]") {
   REQUIRE(pre > post);
 }
 
+TEST_CASE("MemArena static", "[libswoc][MemArena][static]")
+{
+  static constexpr size_t SIZE = 2048;
+  std::byte buffer[SIZE];
+  MemArena arena{{buffer, SIZE}};
+
+  REQUIRE(arena.remaining() > 0);
+  REQUIRE(arena.remaining() < SIZE);
+  REQUIRE(arena.size() == 0);
+
+  // Allocate something and make sure it's in the static area.
+  auto span = arena.alloc(1024);
+  REQUIRE(true == (buffer <= span.data() && span.data() < buffer + SIZE));
+  span = arena.remnant(); // require the remnant to still be in the buffer.
+  REQUIRE(true == (buffer <= span.data() && span.data() < buffer + SIZE));
+
+  // This can't fit, must be somewhere other than the buffer.
+  span = arena.alloc(SIZE);
+  REQUIRE(false == (buffer <= span.data() && span.data() < buffer + SIZE));
+
+  MemArena arena2{std::move(arena)};
+  REQUIRE(arena2.size() > 0);
+
+  arena2.freeze();
+  arena2.thaw();
+
+  REQUIRE(arena.size() == 0);
+  REQUIRE(arena2.size() == 0);
+  // Now let @a arena2 destruct.
+}
+
 #endif // has memory_resource header.

Reply via email to