/usr/include/pion/PionPoolAllocator.hpp is in libpion-common-dev 4.0.7+dfsg-2.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
// -----------------------------------------------------------------------
// pion-common: a collection of common libraries used by the Pion Platform
// -----------------------------------------------------------------------
// Copyright (C) 2007-2008 Atomic Labs, Inc. (http://www.atomiclabs.com)
//
// Distributed under the Boost Software License, Version 1.0.
// See http://www.boost.org/LICENSE_1_0.txt
//

#ifndef __PION_PIONPOOLALLOCATOR_HEADER__
#define __PION_PIONPOOLALLOCATOR_HEADER__

#include <cstdlib>
#include <boost/array.hpp>
#include <boost/scoped_ptr.hpp>
#include <boost/static_assert.hpp>
#include <boost/noncopyable.hpp>
#include <boost/thread/mutex.hpp>
#include <boost/pool/pool.hpp>
#include <pion/PionConfig.hpp>
#include <pion/PionException.hpp>
#if defined(PION_HAVE_MALLOC_TRIM)
#include <malloc.h>
#endif
/// the following enables use of the lock-free cache
#if defined(PION_HAVE_LOCKFREE)
#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable: 4800) // forcing value to bool 'true' or 'false' (performance warning)
#endif
#include <boost/lockfree/detail/tagged_ptr.hpp>
#ifdef _MSC_VER
#pragma warning(pop)
#endif
#include <boost/lockfree/atomic_int.hpp>
#endif

namespace pion {    // begin namespace pion

///
/// PionPoolAllocator: a thread-safe, small object allocator that sacrifices
///                    memory utilization for performance. It combines a
///                    collection of fixed-size pooled memory allocators with
///                    lock-free caches to achieve nearly wait-free, constant
///                    time performance when used for an extended period of time
///
template <std::size_t MinSize = 16, std::size_t MaxSize = 256>
class PionPoolAllocator
    : private boost::noncopyable
{
public:

    /// virtual destructor
    virtual ~PionPoolAllocator()
    {}

    /// default constructor
    PionPoolAllocator(void)
    {
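        // m_pools[n] manages blocks of (n+1)*MinSize bytes, so every request
        // of up to MaxSize bytes can be rounded up to one of these pools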
        for (std::size_t n = 0; n < NumberOfAllocs; ++n) {
            m_pools[n].reset(new FixedSizeAlloc((n+1) * MinSize));
        }
    }

    /**
     * allocates a block of memory
     *
     * @param n minimum size of the new memory block, in bytes
     *
     * @return void * raw pointer to the new memory block
     */
    inline void *malloc(std::size_t n)
    {
        // check for size greater than MaxSize
        if (n > MaxSize)
            return ::malloc(n);
        FixedSizeAlloc *pool_ptr = getPool(n);
#if defined(PION_HAVE_LOCKFREE)
        while (true) {
            // get copy of free list pointer
            FreeListPtr old_free_ptr(pool_ptr->m_free_ptr);
            if (! old_free_ptr)
                break; // use pool alloc if free list is empty
            // use CAS operation to swap the free list pointer
            if (pool_ptr->m_free_ptr.cas(old_free_ptr, old_free_ptr->next.get_ptr()))
                return reinterpret_cast<void*>(old_free_ptr.get_ptr());
        }
#endif
        boost::unique_lock<boost::mutex> pool_lock(pool_ptr->m_mutex);
        return pool_ptr->m_pool.malloc();
    }

    /**
     * deallocates a block of memory
     *
     * @param ptr raw pointer to the block of memory
     * @param n requested size of the memory block, in bytes (actual size may be larger)
     */
    inline void free(void *ptr, std::size_t n)
    {
        // check for size greater than MaxSize
        if (n > MaxSize) {
            ::free(ptr);
            return;
        }
        FixedSizeAlloc *pool_ptr = getPool(n);
#if defined(PION_HAVE_LOCKFREE)
        while (true) {
            // get copy of free list pointer
            FreeListPtr old_free_ptr(pool_ptr->m_free_ptr);
            // cast memory being released to a free list node
            // and point its next pointer to the current free list
            FreeListNode *node_ptr = reinterpret_cast<FreeListNode*>(ptr);
            node_ptr->next.set_ptr(old_free_ptr.get_ptr());
            // use CAS operation to swap the free list pointer
            if (pool_ptr->m_free_ptr.cas(old_free_ptr, node_ptr))
                break;
        }
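        // the block now sits on this pool's lock-free free list; subsequent
        // calls to malloc() hand it out again before touching boost::pool<>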
#else
        boost::unique_lock<boost::mutex> pool_lock(pool_ptr->m_mutex);
        return pool_ptr->m_pool.free(ptr);
#endif
    }

    /**
     * releases every memory block that does not have any allocated chunks
     *
     * @param pad padding bytes passed to malloc_trim(), if it's supported (default=10MB)
     *
     * @return bool true if at least one block of memory was released
     */
    inline bool release_memory(size_t pad = 10240000UL)
    {
        bool result = false;
        /*
        for (std::size_t n = 0; n < NumberOfAllocs; ++n) {
            FixedSizeAlloc *pool_ptr = m_pools[n].get();
            // need to lock before releasing free list because of calls
            // to pool::free()
            boost::unique_lock<boost::mutex> pool_lock(pool_ptr->m_mutex);
#if defined(PION_HAVE_LOCKFREE)
            while (true) {
                // get copy of free list pointer
                FreeListPtr old_free_ptr(pool_ptr->m_free_ptr);
                if (! old_free_ptr)
                    break; // all done: free list is empty
                // use CAS operation to swap the free list pointer
                if (pool_ptr->m_free_ptr.cas(old_free_ptr, old_free_ptr->next.get_ptr()))
                    pool_ptr->m_pool.free(old_free_ptr.get_ptr()); // release memory from pool
            }
#endif
            if (pool_ptr->m_pool.release_memory())
                result = true;
        }
#if defined(PION_HAVE_MALLOC_TRIM)
        ::malloc_trim(pad);
#endif
        */
        return result;
    }

protected:

#if defined(PION_HAVE_LOCKFREE)
    /// data structure used to represent a free node
    struct FreeListNode {
        boost::lockfree::tagged_ptr<struct FreeListNode> next;
    };

    /// data type for a tagged free-list pointer
    typedef boost::lockfree::tagged_ptr<struct FreeListNode> FreeListPtr;
#else
    typedef void * FreeListPtr;
#endif

    /// ensure that:
    /// a) MaxSize >= MinSize
    /// b) MaxSize is a multiple of MinSize
    /// c) MinSize >= sizeof(FreeListNode) [usually 16]
    BOOST_STATIC_ASSERT(MaxSize >= MinSize);
    BOOST_STATIC_ASSERT(MaxSize % MinSize == 0);
#if defined(PION_HAVE_LOCKFREE)
    BOOST_STATIC_ASSERT(MinSize >= sizeof(FreeListNode));
#endif

    /// constant representing the number of fixed-size pool allocators
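    /// (with the default MinSize=16 and MaxSize=256 this works out to
    ///  ((256-1)/16)+1 == 16 pools, managing blocks of 16, 32, ..., 256 bytes)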
    enum { NumberOfAllocs = ((MaxSize-1) / MinSize) + 1 };

    /**
     * data structure used to represent a pooled memory
     * allocator for blocks of a specific size
     */
    struct FixedSizeAlloc
    {
        /**
         * constructs a new fixed-size pool allocator
         *
         * @param size size of memory blocks managed by this allocator, in bytes
         */
        FixedSizeAlloc(std::size_t size)
            : m_size(size), m_pool(size), m_free_ptr(NULL)
        {}

        /// used to protect access to the memory pool
        boost::mutex m_mutex;

        /// size of memory blocks managed by this allocator, in bytes
        std::size_t m_size;

        /// underlying pool allocator used for memory management
        boost::pool<> m_pool;

        /// pointer to a list of free nodes (for lock-free cache)
        FreeListPtr m_free_ptr;
    };

    /**
     * gets an appropriate fixed-size pool allocator
     *
     * @param n the number of bytes to be (de)allocated
     *
     * @return FixedSizeAlloc* pointer to the appropriate fixed-size allocator
     */
    inline FixedSizeAlloc* getPool(const std::size_t n)
    {
        PION_ASSERT(n > 0);
        PION_ASSERT(n <= MaxSize);
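        // requests of 1..MinSize bytes map to m_pools[0], MinSize+1..2*MinSize
        // to m_pools[1], and so on; e.g. with MinSize=16 a 20-byte request
        // yields index (20-1)/16 == 1, the 32-byte pool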
        return m_pools[ (n-1) / MinSize ].get();
    }

private:

    /// a collection of fixed-size pool allocators
    boost::array<boost::scoped_ptr<FixedSizeAlloc>, NumberOfAllocs> m_pools;
};

} // end namespace pion

#endif
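Below is a minimal usage sketch, not part of the packaged header. It relies only on what the header above declares: the pion::PionPoolAllocator<> template with its default MinSize=16 and MaxSize=256, plus the malloc(), free(), and release_memory() members; the surrounding main() is purely illustrative.

#include <pion/PionPoolAllocator.hpp>

int main()
{
    // default template arguments: 16 pools of 16, 32, ..., 256 byte blocks
    pion::PionPoolAllocator<> alloc;

    // 100 bytes is rounded up to the 112-byte pool ((100-1)/16 == 6 -> (6+1)*16)
    void *p = alloc.malloc(100);
    // ... use up to 100 bytes at p ...
    alloc.free(p, 100);     // pass the same requested size so the same pool is found

    // requests larger than MaxSize bypass the pools entirely
    void *big = alloc.malloc(1024);   // forwarded to ::malloc()
    alloc.free(big, 1024);            // forwarded to ::free()

    // in this packaged version the body of release_memory() is commented out,
    // so the call is effectively a no-op and returns false
    alloc.release_memory();
    return 0;
}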