// object_pool.h
#pragma once

#include <atomic>
#include <cassert>
#include <cstddef>
#include <functional>
#include <memory>
#include <memory_resource>
#include <mutex>
#include <new>
#include <type_traits>

namespace modern {

// Functor policies (modeled on Bloomberg BDE's ObjectPoolFunctors)
struct ObjectPoolFunctors {
  using DefaultCreator =
      std::function<void(void *, std::pmr::memory_resource *)>;

  // No-op resetter: released objects are returned to the pool unchanged.
  template <class TYPE> class Nil {
  public:
    void operator()(TYPE *) const {}
  };

  // Calls object->reset() on release.
  template <class TYPE> class Reset {
  public:
    void operator()(TYPE *object) const { object->reset(); }
  };

  // Calls object->clear() on release.
  template <class TYPE> class Clear {
  public:
    void operator()(TYPE *object) const { object->clear(); }
  };
};

// Default creator: placement-constructs TYPE, forwarding the pool's memory
// resource when TYPE is allocator-aware.
template <class TYPE> class DefaultCreator {
public:
  // The resource is accepted for interface compatibility with ObjectPool's
  // constructors but is not stored; operator() receives the pool's resource
  // on every call.
  explicit DefaultCreator(
      std::pmr::memory_resource * = std::pmr::get_default_resource()) {}

  void operator()(void *arena, std::pmr::memory_resource *mr) {
    if constexpr (std::uses_allocator_v<
                      TYPE, std::pmr::polymorphic_allocator<TYPE>>) {
      // TYPE uses an allocator - pass the pool's resource to the constructor
      new (arena) TYPE(std::pmr::polymorphic_allocator<TYPE>(mr));
    } else {
      // TYPE doesn't use an allocator - default-construct it
      new (arena) TYPE{};
    }
  }
};
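
// For example (illustrative): std::pmr::string passes the uses_allocator
// check above, so DefaultCreator<std::pmr::string> forwards the pool's
// memory resource into every pooled string, while a plain aggregate without
// an allocator-aware constructor is simply default-constructed.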

// Main ObjectPool class
template <class TYPE, class CREATOR = DefaultCreator<TYPE>,
          class RESETTER = ObjectPoolFunctors::Nil<TYPE>>
class ObjectPool {
private:
  // Node structure (similar to Bloomberg's design): a node is either the
  // frame header (free-list link + reference count) or raw object storage.
  union ObjectNode {
    struct {
      std::atomic<ObjectNode *> next_p;
      std::atomic<int> refCount;
    } inUse;
    alignas(TYPE) char dummy[sizeof(TYPE)]; // Ensure size/alignment for TYPE
  };

  // Block structure for batch allocation. Aligned to ObjectNode so that the
  // node array immediately following the header is correctly aligned even
  // for over-aligned TYPEs.
  struct alignas(ObjectNode) BlockNode {
    BlockNode *next_p;
    int numObjects;
  };

  // Memory layout constants
  static constexpr int k_ROUNDED_NUM_OBJECTS =
      (sizeof(TYPE) + sizeof(ObjectNode) - 1) / sizeof(ObjectNode);
  static constexpr int k_NUM_OBJECTS_PER_FRAME = 1 + k_ROUNDED_NUM_OBJECTS;
  static constexpr int k_GROW_FACTOR = 2;
  static constexpr int k_MAX_NUM_OBJECTS = -32; // cap for geometric growth
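
  // Worked example (illustrative): because the union embeds sizeof(TYPE)
  // bytes of storage, sizeof(ObjectNode) >= sizeof(TYPE), so
  // k_ROUNDED_NUM_OBJECTS evaluates to 1 for every TYPE and each frame is
  // exactly two nodes. Concretely, for a TYPE with sizeof(TYPE) == 24 and
  // alignof(TYPE) == 8, sizeof(ObjectNode) == 24 (the dummy array dominates
  // the 16-byte pointer/refCount header), k_ROUNDED_NUM_OBJECTS ==
  // (24 + 24 - 1) / 24 == 1, and a frame spans 48 bytes: one header node
  // followed by one node of object storage.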

  // Data members
  std::atomic<ObjectNode *> freeObjectsList_{nullptr}; // lock-free LIFO list
  CREATOR objectCreator_;
  RESETTER objectResetter_;
  int numReplenishObjects_; // batch size; negative while growing geometrically
  std::atomic<int> numAvailableObjects_{0};
  std::atomic<int> numObjects_{0};
  BlockNode *blockList_{nullptr}; // every block allocated (for ~ObjectPool)
  std::pmr::memory_resource *memoryResource_;
  std::mutex mutex_; // serializes replenishment only

  // Exception-safe cleanup helper: destroys the objects constructed so far
  // and frees the block if a creator throws partway through addObjects().
  class AutoCleanup {
    BlockNode *block_;
    ObjectNode *head_;
    std::pmr::memory_resource *mr_;
    size_t blockBytes_;
    int numNodes_;

  public:
    AutoCleanup(BlockNode *block, ObjectNode *head,
                std::pmr::memory_resource *mr, size_t blockBytes)
        : block_(block), head_(head), mr_(mr), blockBytes_(blockBytes),
          numNodes_(0) {}

    ~AutoCleanup() {
      if (head_) {
        // Each frame starts at its header node; the object lives in the
        // node immediately after it.
        ObjectNode *current = head_;
        for (int i = 0; i < numNodes_; ++i) {
          reinterpret_cast<TYPE *>(current + 1)->~TYPE();
          current += k_NUM_OBJECTS_PER_FRAME;
        }
        // Deallocate with the same size/alignment used for allocation.
        mr_->deallocate(block_, blockBytes_, alignof(BlockNode));
      }
    }

    AutoCleanup &operator++() {
      ++numNodes_;
      return *this;
    }
    void release() {
      head_ = nullptr;
      block_ = nullptr;
    }
  };

  // Replenish the free list. A negative numReplenishObjects_ requests
  // geometric growth: the batch size doubles until it reaches
  // -k_MAX_NUM_OBJECTS, then stays fixed.
  void replenish() {
    int numObjects = numReplenishObjects_ >= 0 ? numReplenishObjects_
                                               : -numReplenishObjects_;
    addObjects(numObjects);

    if (numReplenishObjects_ < 0) {
      if (numReplenishObjects_ > k_MAX_NUM_OBJECTS) {
        numReplenishObjects_ *= k_GROW_FACTOR;
      } else {
        numReplenishObjects_ = -numReplenishObjects_;
      }
    }
  }
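
  // For example (illustrative): with the default growBy == -1 the successive
  // batch sizes are 1, 2, 4, 8, 16, 32; once numReplenishObjects_ reaches
  // k_MAX_NUM_OBJECTS (-32) it flips positive and every later replenishment
  // adds a fixed 32 objects.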

  // Allocate one block holding numObjects frames, construct the objects, and
  // splice the new frames onto the lock-free free list.
  void addObjects(int numObjects) {
    const size_t bytesPerBlock =
        sizeof(BlockNode) +
        sizeof(ObjectNode) * numObjects * k_NUM_OBJECTS_PER_FRAME;

    // Allocate block using PMR
    BlockNode *start = static_cast<BlockNode *>(
        memoryResource_->allocate(bytesPerBlock, alignof(BlockNode)));

    start->next_p = blockList_;
    start->numObjects = numObjects;

    ObjectNode *last = reinterpret_cast<ObjectNode *>(start + 1);
    AutoCleanup guard(start, last, memoryResource_, bytesPerBlock);

    // Create objects; ++guard records each fully constructed object so the
    // guard can unwind correctly if a later construction throws.
    for (int i = 0; i < numObjects; ++i, ++guard) {
      last->inUse.next_p.store(last + k_NUM_OBJECTS_PER_FRAME,
                               std::memory_order_relaxed);
      last->inUse.refCount.store(0, std::memory_order_relaxed);

      // Use creator to construct object in the node after the header
      objectCreator_(last + 1, memoryResource_);

      last += k_NUM_OBJECTS_PER_FRAME;
    }
    last -= k_NUM_OBJECTS_PER_FRAME; // last now points to the final frame

    guard.release();
    blockList_ = start;

    // Link to free list (lock-free): the tail's next_p is repointed at the
    // current head until the CAS publishing the new sublist succeeds.
    ObjectNode *firstNode = reinterpret_cast<ObjectNode *>(start + 1);
    ObjectNode *expected = freeObjectsList_.load(std::memory_order_acquire);
    do {
      last->inUse.next_p.store(expected, std::memory_order_relaxed);
    } while (!freeObjectsList_.compare_exchange_weak(
        expected, firstNode, std::memory_order_release,
        std::memory_order_acquire));

    numObjects_.fetch_add(numObjects, std::memory_order_relaxed);
    numAvailableObjects_.fetch_add(numObjects, std::memory_order_relaxed);
  }
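
  // Resulting block layout for addObjects(3) (illustrative):
  //
  //   [BlockNode | hdr obj | hdr obj | hdr obj]
  //               '-------'
  //        one frame: k_NUM_OBJECTS_PER_FRAME ObjectNodes
  //
  // getObject() hands out pointers to the "obj" storage; the preceding "hdr"
  // node carries the free-list link and reference count.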

public:
  // Constructors. growBy > 0 requests a fixed replenishment batch size;
  // growBy < 0 requests geometric growth starting from -growBy. Zero is
  // invalid.
  explicit ObjectPool(int growBy = -1, std::pmr::memory_resource *mr =
                                           std::pmr::get_default_resource())
      : objectCreator_(mr), numReplenishObjects_(growBy), memoryResource_(mr) {
    assert(growBy != 0);
  }

  explicit ObjectPool(
      const CREATOR &creator, int growBy = -1,
      std::pmr::memory_resource *mr = std::pmr::get_default_resource())
      : objectCreator_(creator), numReplenishObjects_(growBy),
        memoryResource_(mr) {
    assert(growBy != 0);
  }

  ObjectPool(const CREATOR &creator, const RESETTER &resetter, int growBy = -1,
             std::pmr::memory_resource *mr = std::pmr::get_default_resource())
      : objectCreator_(creator), objectResetter_(resetter),
        numReplenishObjects_(growBy), memoryResource_(mr) {
    assert(growBy != 0);
  }

  ~ObjectPool() {
    // Destroy all objects; the pool owns every object it created, including
    // any still held by clients.
    for (BlockNode *block = blockList_; block; block = block->next_p) {
      int numObjects = block->numObjects;
      ObjectNode *node = reinterpret_cast<ObjectNode *>(block + 1);

      for (int i = 0; i < numObjects; ++i) {
        reinterpret_cast<TYPE *>(node + 1)->~TYPE();
        node += k_NUM_OBJECTS_PER_FRAME;
      }
    }

    // Deallocate blocks
    while (blockList_) {
      BlockNode *next = blockList_->next_p;
      const size_t bytesPerBlock =
          sizeof(BlockNode) +
          sizeof(ObjectNode) * blockList_->numObjects * k_NUM_OBJECTS_PER_FRAME;
      memoryResource_->deallocate(blockList_, bytesPerBlock,
                                  alignof(BlockNode));
      blockList_ = next;
    }
  }

  // Get object (lock-free fast path). Claiming the head node's refCount
  // before unlinking it prevents two threads from popping the same node.
  TYPE *getObject() {
    ObjectNode *node;

    do {
      node = freeObjectsList_.load(std::memory_order_acquire);
      if (!node) {
        // Slow path: take the lock and replenish if the list is still empty.
        std::lock_guard<std::mutex> lock(mutex_);
        node = freeObjectsList_.load(std::memory_order_relaxed);
        if (!node) {
          replenish();
          continue;
        }
      }

      // Try to acquire node with atomic reference counting
      int expected = 0;
      if (node->inUse.refCount.compare_exchange_weak(
              expected, 2, std::memory_order_acq_rel,
              std::memory_order_relaxed)) {

        // Try to remove from free list
        ObjectNode *next = node->inUse.next_p.load(std::memory_order_relaxed);
        if (freeObjectsList_.compare_exchange_weak(node, next,
                                                   std::memory_order_release,
                                                   std::memory_order_acquire)) {
          break;
        }

        // Failed to remove, restore ref count
        node->inUse.refCount.store(0, std::memory_order_release);
      }
    } while (true);

    node->inUse.next_p.store(nullptr, std::memory_order_relaxed);
    numAvailableObjects_.fetch_sub(1, std::memory_order_relaxed);
    return reinterpret_cast<TYPE *>(node + 1);
  }

  // Release object back to pool
  void releaseObject(TYPE *object) {
    // Recover the frame header that precedes the object storage.
    ObjectNode *node = reinterpret_cast<ObjectNode *>(object) - 1;

    // Reset object state
    objectResetter_(object);

    // Return to free list
    node->inUse.refCount.store(0, std::memory_order_relaxed);

    ObjectNode *expected = freeObjectsList_.load(std::memory_order_relaxed);
    do {
      node->inUse.next_p.store(expected, std::memory_order_relaxed);
    } while (!freeObjectsList_.compare_exchange_weak(
        expected, node, std::memory_order_release, std::memory_order_relaxed));

    numAvailableObjects_.fetch_add(1, std::memory_order_relaxed);
  }

  // Capacity management
  void increaseCapacity(int numObjects) {
    if (numObjects > 0) {
      std::lock_guard<std::mutex> lock(mutex_);
      addObjects(numObjects);
    }
  }

  void reserveCapacity(int numObjects) {
    std::lock_guard<std::mutex> lock(mutex_);
    int needed = numObjects - numObjects_.load(std::memory_order_relaxed);
    if (needed > 0) {
      addObjects(needed);
    }
  }

  // Accessors
  int numAvailableObjects() const {
    return numAvailableObjects_.load(std::memory_order_relaxed);
  }

  int numObjects() const { return numObjects_.load(std::memory_order_relaxed); }

  std::pmr::memory_resource *memory_resource() const { return memoryResource_; }
};

} // namespace modern
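
// ---------------------------------------------------------------------------
// Usage (illustrative sketch, not compiled as part of this header). A pool
// of std::pmr::string: DefaultCreator detects that pmr::string is
// allocator-aware and forwards the pool's memory resource into each string,
// and ObjectPoolFunctors::Clear empties a string when it is released.
//
//   #include "object_pool.h"
//   #include <string>
//
//   int main() {
//     std::pmr::monotonic_buffer_resource arena;
//     modern::ObjectPool<std::pmr::string,
//                        modern::DefaultCreator<std::pmr::string>,
//                        modern::ObjectPoolFunctors::Clear<std::pmr::string>>
//         pool(/* growBy = */ -1, &arena);
//
//     std::pmr::string *s = pool.getObject(); // lock-free fast path
//     s->assign("hello");
//     pool.releaseObject(s);                  // clear()ed and recycled
//     return 0;
//   }
// ---------------------------------------------------------------------------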