00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028
00029
00030
00031
00032
00033
00034
00035
00036
00037
00038
00039
00040
00041
00042
00047 #ifndef _POOL_ALLOCATOR_H
00048 #define _POOL_ALLOCATOR_H 1
00049
00050 #include <bits/c++config.h>
00051 #include <cstdlib>
00052 #include <new>
00053 #include <bits/functexcept.h>
00054 #include <bits/atomicity.h>
00055 #include <bits/concurrence.h>
00056
00057 namespace __gnu_cxx
00058 {
  /// @brief  Base class for __pool_alloc.
  ///
  /// Holds the process-wide pool state shared by every instantiation of
  /// __pool_alloc<_Tp>: small requests are served from size-segregated
  /// singly-linked free lists, one list per multiple of _S_align bytes up
  /// to _S_max_bytes.  The static data members and the out-of-line member
  /// functions are defined in the library, not in this header.
  class __pool_alloc_base
  {
  protected:

    enum { _S_align = 8 };        // Every pooled block size is a multiple of this.
    enum { _S_max_bytes = 128 };  // Larger requests bypass the pool (see __pool_alloc).
    enum { _S_free_list_size = (size_t)_S_max_bytes / (size_t)_S_align };

    // Free-list node.  While a block sits on a free list, its first
    // pointer-sized bytes hold the link; once handed out, the whole
    // block is client storage (_M_client_data).
    union _Obj
    {
      union _Obj* _M_free_list_link;
      char        _M_client_data[1];
    };

    // One free list per size class (8, 16, ..., 128 bytes).  Shared by
    // all instantiations; accessed under _M_get_mutex() in this header.
    static _Obj* volatile _S_free_list[_S_free_list_size];

    // Bounds of the current un-carved chunk and cumulative heap
    // bookkeeping, maintained by the library-side definitions.
    static char* _S_start_free;
    static char* _S_end_free;
    static size_t _S_heap_size;

    // Round __bytes up to the next multiple of _S_align.
    size_t
    _M_round_up(size_t __bytes)
    { return ((__bytes + (size_t)_S_align - 1) & ~((size_t)_S_align - 1)); }

    // Return the free list serving requests of __bytes bytes.
    _Obj* volatile*
    _M_get_free_list(size_t __bytes);

    // Mutex guarding the free lists and chunk state.
    mutex_type&
    _M_get_mutex();

    // Replenish the free list for (rounded) size __n and return one
    // object of that size.  Called with the mutex held in allocate().
    void*
    _M_refill(size_t __n);

    // Obtain a chunk for __nobjs objects of size __n; __nobjs is an
    // in-out parameter — presumably reduced when memory is tight
    // (definition lives in the library; verify there).
    char*
    _M_allocate_chunk(size_t __n, int& __nobjs);
  };
00118
00119
  /// @brief  Allocator using global, size-segregated free lists.
  ///
  /// Thin standard-allocator interface over __pool_alloc_base.  All
  /// instantiations share the base class's static pool, which is why
  /// any two __pool_alloc objects compare equal (see operator== below).
  template<typename _Tp>
    class __pool_alloc : private __pool_alloc_base
    {
    private:
      // Tri-state latch for the GLIBCXX_FORCE_NEW environment variable:
      // 0 = not yet checked, 1 = set (bypass the pool, use operator
      // new/delete), -1 = unset (use the pool).  Written once, in
      // allocate().
      static _Atomic_word _S_force_new;

    public:
      typedef size_t     size_type;
      typedef ptrdiff_t  difference_type;
      typedef _Tp*       pointer;
      typedef const _Tp* const_pointer;
      typedef _Tp&       reference;
      typedef const _Tp& const_reference;
      typedef _Tp        value_type;

      // Standard rebind mechanism: the same allocator for another type.
      template<typename _Tp1>
        struct rebind
        { typedef __pool_alloc<_Tp1> other; };

      // The allocator itself is stateless, so construction, copying and
      // destruction are all trivial no-ops.
      __pool_alloc() throw() { }

      __pool_alloc(const __pool_alloc&) throw() { }

      template<typename _Tp1>
        __pool_alloc(const __pool_alloc<_Tp1>&) throw() { }

      ~__pool_alloc() throw() { }

      pointer
      address(reference __x) const { return &__x; }

      const_pointer
      address(const_reference __x) const { return &__x; }

      // Largest element count whose byte size fits in size_t.
      size_type
      max_size() const throw()
      { return size_t(-1) / sizeof(_Tp); }

      // Copy-construct __val into the raw storage at __p.
      void
      construct(pointer __p, const _Tp& __val)
      { ::new(__p) _Tp(__val); }

      // Destroy the object at __p without releasing its storage.
      void
      destroy(pointer __p) { __p->~_Tp(); }

      // Allocate storage for __n objects (defined below).  The second
      // (hint) parameter is accepted for interface conformance and
      // ignored.
      pointer
      allocate(size_type __n, const void* = 0);

      // Release storage previously obtained from allocate() with the
      // same __n (defined below).
      void
      deallocate(pointer __p, size_type __n);
    };
00174
  // All __pool_alloc instantiations draw from the same static pool in
  // __pool_alloc_base, so any allocator of this family can deallocate
  // storage obtained from any other: allocators always compare equal.
  template<typename _Tp>
    inline bool
    operator==(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
    { return true; }
00179
  // Complement of operator==: stateless allocators are never unequal.
  template<typename _Tp>
    inline bool
    operator!=(const __pool_alloc<_Tp>&, const __pool_alloc<_Tp>&)
    { return false; }
00184
  // Definition of the per-instantiation GLIBCXX_FORCE_NEW latch.
  // Zero-initialized ("not yet checked"); moved to 1 or -1 by the first
  // call to allocate().
  template<typename _Tp>
    _Atomic_word
    __pool_alloc<_Tp>::_S_force_new;
00188
00189 template<typename _Tp>
00190 _Tp*
00191 __pool_alloc<_Tp>::allocate(size_type __n, const void*)
00192 {
00193 pointer __ret = 0;
00194 if (__builtin_expect(__n != 0, true))
00195 {
00196 if (__builtin_expect(__n > this->max_size(), false))
00197 std::__throw_bad_alloc();
00198
00199
00200
00201
00202 if (_S_force_new == 0)
00203 {
00204 if (getenv("GLIBCXX_FORCE_NEW"))
00205 __atomic_add(&_S_force_new, 1);
00206 else
00207 __atomic_add(&_S_force_new, -1);
00208 }
00209
00210 const size_t __bytes = __n * sizeof(_Tp);
00211 if (__bytes > size_t(_S_max_bytes) || _S_force_new == 1)
00212 __ret = static_cast<_Tp*>(::operator new(__bytes));
00213 else
00214 {
00215 _Obj* volatile* __free_list = _M_get_free_list(__bytes);
00216
00217 lock sentry(_M_get_mutex());
00218 _Obj* __restrict__ __result = *__free_list;
00219 if (__builtin_expect(__result == 0, 0))
00220 __ret = static_cast<_Tp*>(_M_refill(_M_round_up(__bytes)));
00221 else
00222 {
00223 *__free_list = __result->_M_free_list_link;
00224 __ret = reinterpret_cast<_Tp*>(__result);
00225 }
00226 if (__builtin_expect(__ret == 0, 0))
00227 std::__throw_bad_alloc();
00228 }
00229 }
00230 return __ret;
00231 }
00232
00233 template<typename _Tp>
00234 void
00235 __pool_alloc<_Tp>::deallocate(pointer __p, size_type __n)
00236 {
00237 if (__builtin_expect(__n != 0 && __p != 0, true))
00238 {
00239 const size_t __bytes = __n * sizeof(_Tp);
00240 if (__bytes > static_cast<size_t>(_S_max_bytes) || _S_force_new == 1)
00241 ::operator delete(__p);
00242 else
00243 {
00244 _Obj* volatile* __free_list = _M_get_free_list(__bytes);
00245 _Obj* __q = reinterpret_cast<_Obj*>(__p);
00246
00247 lock sentry(_M_get_mutex());
00248 __q ->_M_free_list_link = *__free_list;
00249 *__free_list = __q;
00250 }
00251 }
00252 }
00253 }
00254
00255 #endif