/* -*- mode:C++; c-basic-offset:4 -*-
     Shore-MT -- Multi-threaded port of the SHORE storage manager

                       Copyright (c) 2007-2009
      Data Intensive Applications and Systems Laboratory (DIAS)
              Ecole Polytechnique Federale de Lausanne

                         All Rights Reserved.

   Permission to use, copy, modify and distribute this software and
   its documentation is hereby granted, provided that both the
   copyright notice and this permission notice appear in all copies of
   the software, derivative works or modified versions, and any
   portions thereof, and that both notices appear in supporting
   documentation.

   This code is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. THE AUTHORS
   DISCLAIM ANY LIABILITY OF ANY KIND FOR ANY DAMAGES WHATSOEVER
   RESULTING FROM THE USE OF THIS SOFTWARE.
*/

#ifndef __ATOMIC_TRASH_STACK
#define __ATOMIC_TRASH_STACK

#include <atomic_templates.h>

// for placement new support, which users need
#include <new>
#include <cassert>
#include <stdlib.h>
#include "atomic_container.h"


/** \brief A thread-safe memory pool based on the atomic container, used by
 * atomic_class_pool.
 *
 * Creates a new atomic_container with \e seed pre-allocated
 * untyped items of size \e nbytes each.
 * It is important to have a non-zero seed value so that
 * atomic_container versioning works correctly.
 *
 * Maintains a global freelist of fixed-size memory chunks to recycle,
 * and provides a drop-in replacement for malloc() and free().
 */
struct atomic_preallocated_pool : protected atomic_container
{
    atomic_preallocated_pool(uint nbytes, long seed=128)
        : atomic_container(-sizeof(ptr)), _nbytes(nbytes+sizeof(ptr))
    {
        // start with a non-empty pool so threads don't race at the beginning
        ptr* head = NULL;
        for(int i=0; i < seed; i++) {
            vpn u = {alloc()};
            u.p->next = head;
            head = u.p;
        }
        for(int i=0; i < seed; i++) {
            ptr* p = head;
            head = head->next;
            dealloc(p);
        }
    }
    void* alloc() {
        // reuse a recycled chunk if one is available...
        void* val = pop();
        if(val) return val;

        // ...otherwise fall back to the system allocator
        vpn u = { malloc(_nbytes) };
        if(!u.v) u.v = null();
        return prepare(u);
    }
    void dealloc(void* val) { push(val); }

    ~atomic_preallocated_pool() {
        vpn val;
        while( (val.v=pop()) ) {
            val.n += _offset; // back up to the real start of the pointer
            free(val.v);
        }
    }

    uint const _nbytes;
};
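/* A minimal usage sketch (not part of the original header), showing the pool
 * standing in for malloc()/free() on fixed-size chunks. The 64-byte chunk
 * size is an arbitrary illustrative choice.
 *
 *     atomic_preallocated_pool pool(64);  // every chunk holds 64 bytes
 *
 *     void* buf = pool.alloc();           // like malloc(64), but reuses a
 *                                         // recycled chunk when one is free
 *     // ... use the 64-byte buffer ...
 *     pool.dealloc(buf);                  // like free(buf), but pushes the
 *                                         // chunk back onto the freelist
 */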
// forward decls...
template<class T>
struct atomic_class_pool;
template<class T>
void* operator new(size_t nbytes, atomic_class_pool<T>& pool);
template<class T>
inline void operator delete(void* ptr, atomic_class_pool<T>& pool);

/** \brief A thread-safe memory pool for typed objects, based on atomic_preallocated_pool.
 *
 * Provides a replacement for new/delete on the specific class. Note
 * that there's actually no way to prevent the user from allocating
 * whatever they want, but they will be unable to destroy anything but
 * the specified class (and its subclasses).
 *
 * Example:
 * \code
 * class foo { };
 * atomic_class_pool<foo> pool;
 * foo* f = new(pool) foo;
 * pool.destroy(f);
 * \endcode
 */
template<class T>
struct atomic_class_pool : protected atomic_preallocated_pool {

    /** \brief Create a pool for class T.
     *
     * By default the pool will hand out sizeof(T) bytes at a time; if
     * T is a base class and this pool is to be used with subclasses,
     * nbytes must be set at least as large as the largest
     * class. Oversized allocations will assert().
     */
    atomic_class_pool(long nbytes=sizeof(T), long seed=128)
        : atomic_preallocated_pool(nbytes, seed)
    {
    }

    /** \brief Destroys an object (by calling its destructor) and returns its
     * memory to the pool.
     *
     * Undefined behavior results if the object did not come from this
     * pool.
     */
    void destroy(T* tptr) {
        // avoid pointer aliasing problems with the optimizer
        union { T* t; void* v; } u = {tptr};

        // destruct the object and deallocate its memory
        u.t->~T();
        dealloc(u.v);
    }

    /** \brief Return the object size given to the constructor.
     */
    uint nbytes() { return _nbytes; }

    // these guys need to access the underlying preallocated stack
    friend void* operator new<>(size_t, atomic_class_pool<T> &);
    friend void operator delete<>(void*, atomic_class_pool<T> &);
};

/** \brief Allocates memory for a T from the pool.
 *
 * NOTE: use placement-style new with the pool:
 * \code
 * T* t = new(pool) T(...);
 * \endcode
 *
 * WARNING: when finished, call pool.destroy(t) instead of delete.
 */
template<class T>
inline void* operator new(size_t nbytes, atomic_class_pool<T>& pool) {
    assert(pool.nbytes() >= nbytes);
    return pool.alloc();
}

/** Called automatically by the compiler if T's constructor throws
 * (otherwise the memory would leak).
 *
 * Unfortunately, there is no "delete(pool)" syntax in C++, so the user
 * must still call pool.destroy().
 */
template<class T>
inline void operator delete(void* ptr, atomic_class_pool<T>& pool) {
    pool.dealloc(ptr);
}

#endif
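/* A minimal sketch (not part of the original header) of atomic_class_pool
 * with a small class hierarchy. The types base_rec and wide_rec are
 * illustrative assumptions; the point is that a pool serving subclasses must
 * be sized for the largest one, because operator new asserts that the pool's
 * chunk size covers the requested allocation.
 *
 *     struct base_rec            { virtual ~base_rec() {} int id; };
 *     struct wide_rec : base_rec { char payload[56]; };
 *
 *     // size the pool for the largest subclass it will ever serve
 *     atomic_class_pool<base_rec> pool(sizeof(wide_rec));
 *
 *     base_rec* b = new(pool) base_rec;  // fits trivially
 *     base_rec* w = new(pool) wide_rec;  // fits only because the pool was
 *                                        // sized for wide_rec above
 *
 *     // never plain delete: hand the memory back to the pool instead
 *     pool.destroy(w);                   // ~base_rec() is virtual, so the
 *     pool.destroy(b);                   // wide_rec part is destroyed too
 */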