pion-net  4.0.9
PionPoolAllocator.hpp
// -----------------------------------------------------------------------
// pion-common: a collection of common libraries used by the Pion Platform
// -----------------------------------------------------------------------
// Copyright (C) 2007-2008 Atomic Labs, Inc. (http://www.atomiclabs.com)
//
// Distributed under the Boost Software License, Version 1.0.
// See http://www.boost.org/LICENSE_1_0.txt
//

#ifndef __PION_PIONPOOLALLOCATOR_HEADER__
#define __PION_PIONPOOLALLOCATOR_HEADER__

#include <cstdlib>
#include <boost/array.hpp>
#include <boost/scoped_ptr.hpp>
#include <boost/static_assert.hpp>
#include <boost/noncopyable.hpp>
#include <boost/thread/mutex.hpp>
#include <boost/pool/pool.hpp>
#include <pion/PionConfig.hpp>
#include <pion/PionException.hpp>

#if defined(PION_HAVE_MALLOC_TRIM)
    #include <malloc.h>
#endif

#if defined(PION_HAVE_LOCKFREE)
#ifdef _MSC_VER
    #pragma warning(push)
    #pragma warning(disable: 4800) // forcing value to bool 'true' or 'false' (performance warning)
#endif
    #include <boost/lockfree/detail/tagged_ptr.hpp>
#ifdef _MSC_VER
    #pragma warning(pop)
#endif
    #include <boost/lockfree/atomic_int.hpp>
#endif


namespace pion { // begin namespace pion

/// PionPoolAllocator: a thread-safe, small-object allocator that combines a
/// collection of fixed-size pooled memory allocators with lock-free free-list
/// caches (when PION_HAVE_LOCKFREE is defined) to reduce locking overhead
template <std::size_t MinSize = 16, std::size_t MaxSize = 256>
class PionPoolAllocator
    : private boost::noncopyable
{
public:

    /// virtual destructor
    virtual ~PionPoolAllocator()
    {}

    /// default constructor: creates one fixed-size allocator for each
    /// multiple of MinSize up to and including MaxSize
    PionPoolAllocator(void)
    {
        for (std::size_t n = 0; n < NumberOfAllocs; ++n) {
            m_pools[n].reset(new FixedSizeAlloc((n+1) * MinSize));
        }
    }

    /**
     * allocates a block of memory
     *
     * @param n minimum size of the new memory block, in bytes
     *
     * @return void * raw pointer to the new memory block
     */
    inline void *malloc(std::size_t n)
    {
        // check for size greater than MaxSize
        if (n > MaxSize)
            return ::malloc(n);
        FixedSizeAlloc *pool_ptr = getPool(n);

#if defined(PION_HAVE_LOCKFREE)
        while (true) {
            // get copy of free list pointer
            FreeListPtr old_free_ptr(pool_ptr->m_free_ptr);
            if (! old_free_ptr)
                break; // use pool alloc if free list is empty

            // use CAS operation to swap the free list pointer
            if (pool_ptr->m_free_ptr.cas(old_free_ptr, old_free_ptr->next.get_ptr()))
                return reinterpret_cast<void*>(old_free_ptr.get_ptr());
        }
#endif

        boost::unique_lock<boost::mutex> pool_lock(pool_ptr->m_mutex);
        return pool_ptr->m_pool.malloc();
    }

    /**
     * deallocates a block of memory
     *
     * @param ptr raw pointer to the block of memory
     * @param n requested size of the memory block, in bytes
     */
    inline void free(void *ptr, std::size_t n)
    {
        // check for size greater than MaxSize
        if (n > MaxSize) {
            ::free(ptr);
            return;
        }
        FixedSizeAlloc *pool_ptr = getPool(n);
#if defined(PION_HAVE_LOCKFREE)
        while (true) {
            // get copy of free list pointer
            FreeListPtr old_free_ptr(pool_ptr->m_free_ptr);

            // cast memory being released to a free list node
            // and point its next pointer to the current free list
            FreeListNode *node_ptr = reinterpret_cast<FreeListNode*>(ptr);
            node_ptr->next.set_ptr(old_free_ptr.get_ptr());

            // use CAS operation to swap the free list pointer
            if (pool_ptr->m_free_ptr.cas(old_free_ptr, node_ptr))
                break;
        }
#else
        boost::unique_lock<boost::mutex> pool_lock(pool_ptr->m_mutex);
        return pool_ptr->m_pool.free(ptr);
#endif
    }

    /**
     * releases unused pool memory back to the system
     * (the implementation is currently disabled and kept commented out for reference)
     *
     * @param pad padding, in bytes, passed to malloc_trim() if it is available
     *
     * @return true if at least one block of memory was released
     */
    inline bool release_memory(size_t pad = 10240000UL)
    {
        bool result = false;
/*
        for (std::size_t n = 0; n < NumberOfAllocs; ++n) {
            FixedSizeAlloc *pool_ptr = m_pools[n].get();
            // need to lock before releasing free list because of calls
            // to pool::free()
            boost::unique_lock<boost::mutex> pool_lock(pool_ptr->m_mutex);
#if defined(PION_HAVE_LOCKFREE)
            while (true) {
                // get copy of free list pointer
                FreeListPtr old_free_ptr(pool_ptr->m_free_ptr);
                if (! old_free_ptr)
                    break; // all done: free list is empty

                // use CAS operation to swap the free list pointer
                if (pool_ptr->m_free_ptr.cas(old_free_ptr, old_free_ptr->next.get_ptr()))
                    pool_ptr->m_pool.free(old_free_ptr.get_ptr()); // release memory from pool
            }
#endif
            if (pool_ptr->m_pool.release_memory())
                result = true;
        }
#if defined(PION_HAVE_MALLOC_TRIM)
        ::malloc_trim(pad);
#endif
*/
        return result;
    }

protected:

#if defined(PION_HAVE_LOCKFREE)
    /// data structure used to represent a free node in the lock-free cache
    struct FreeListNode {
        boost::lockfree::tagged_ptr<struct FreeListNode>   next;
    };

    /// tagged pointer type used for the head of each lock-free free list
    typedef boost::lockfree::tagged_ptr<struct FreeListNode>   FreeListPtr;
#else
    typedef void *  FreeListPtr;
#endif

    /// ensure that MaxSize >= MinSize, that MaxSize is a multiple of MinSize,
    /// and (when lock-free caches are used) that MinSize can hold a FreeListNode
    BOOST_STATIC_ASSERT(MaxSize >= MinSize);
    BOOST_STATIC_ASSERT(MaxSize % MinSize == 0);
#if defined(PION_HAVE_LOCKFREE)
    BOOST_STATIC_ASSERT(MinSize >= sizeof(FreeListNode));
#endif

    /// number of fixed-size pool allocators (one for each multiple of MinSize up to MaxSize)
    enum { NumberOfAllocs = ((MaxSize-1) / MinSize) + 1 };

    /// data structure used to represent a single fixed-size pool allocator
    struct FixedSizeAlloc
    {
        /**
         * constructs a new fixed-size pool allocator
         *
         * @param size size of memory blocks managed by this allocator, in bytes
         */
        FixedSizeAlloc(std::size_t size)
            : m_size(size), m_pool(size), m_free_ptr(NULL)
        {}

        /// used to protect access to the memory pool
        boost::mutex        m_mutex;

        /// size of memory blocks managed by this allocator, in bytes
        std::size_t         m_size;

        /// underlying pool allocator used for memory management
        boost::pool<>       m_pool;

        /// pointer to a list of free nodes (for lock-free cache)
        FreeListPtr         m_free_ptr;
    };


    /**
     * gets the appropriate fixed-size pool allocator for a given request size
     *
     * @param n the number of bytes to be (de)allocated
     *
     * @return FixedSizeAlloc* pointer to the appropriate fixed-size allocator
     */
    inline FixedSizeAlloc* getPool(const std::size_t n)
    {
        PION_ASSERT(n > 0);
        PION_ASSERT(n <= MaxSize);
        return m_pools[ (n-1) / MinSize ].get();
    }


private:

    /// a collection of fixed-size pool allocators, one for each block size
    boost::array<boost::scoped_ptr<FixedSizeAlloc>, NumberOfAllocs>    m_pools;
};


} // end namespace pion

#endif
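
The sketch below is not part of the header; it only illustrates how PionPoolAllocator might be used. It assumes the header is reachable as <pion/PionPoolAllocator.hpp> and uses a hypothetical MyObject type; the allocator hands out raw memory, so the caller constructs and destroys objects in place and must pass the same size back to free(). Requests larger than MaxSize simply fall back to ::malloc() and ::free().

#include <new>
#include <pion/PionPoolAllocator.hpp>

// hypothetical small object used only for this example
struct MyObject {
    int     id;
    double  value;
};

int main()
{
    // one allocator covering block sizes from 16 up to 256 bytes (the defaults)
    pion::PionPoolAllocator<16, 256> allocator;

    // allocate raw memory from the pool and construct an object in place
    void *mem = allocator.malloc(sizeof(MyObject));
    MyObject *obj = new (mem) MyObject();
    obj->id = 1;
    obj->value = 3.14;

    // destroy the object and return its memory to the pool,
    // passing the same size that was requested from malloc()
    obj->~MyObject();
    allocator.free(mem, sizeof(MyObject));

    return 0;
}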