00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028
00029
00035 #ifndef _MT_ALLOCATOR_H
00036 #define _MT_ALLOCATOR_H 1
00037
00038 #include <new>
00039 #include <cstdlib>
00040 #include <bits/functexcept.h>
00041 #include <bits/gthr.h>
00042 #include <bits/atomicity.h>
00043
00044 namespace __gnu_cxx
00045 {
  /**
   *  @brief  Fixed-size, thread-aware pool allocator.
   *
   *  Small requests (up to _Tune::_M_max_bytes) are served from
   *  power-of-two size-class ("bin") freelists of pre-carved blocks;
   *  larger requests -- and all requests when GLIBCXX_FORCE_NEW is set
   *  in the environment -- fall through to ::operator new.  When
   *  threads are active each thread owns a private freelist per bin,
   *  with a shared "global" list at index 0 protected by a per-bin
   *  mutex.  All pool state is static, so every instantiation of
   *  __mt_alloc<_Tp> for a given _Tp shares one pool.
   */
  template<typename _Tp>
    class __mt_alloc
    {
    public:
      // Standard allocator member types.
      typedef size_t     size_type;
      typedef ptrdiff_t  difference_type;
      typedef _Tp*       pointer;
      typedef const _Tp* const_pointer;
      typedef _Tp&       reference;
      typedef const _Tp& const_reference;
      typedef _Tp        value_type;

      template<typename _Tp1>
        struct rebind
        { typedef __mt_alloc<_Tp1> other; };

      __mt_alloc() throw()
      {
        // Intentionally empty: all real setup is deferred to
        // _S_initialize(), triggered by the first allocate() call.
      }

      __mt_alloc(const __mt_alloc&) throw()
      {
        // Nothing to copy -- all state is static.
      }

      template<typename _Tp1>
        __mt_alloc(const __mt_alloc<_Tp1>& obj) throw()
        {
          // Nothing to copy -- all state is static.
        }

      ~__mt_alloc() throw() { }

      pointer
      address(reference __x) const
      { return &__x; }

      const_pointer
      address(const_reference __x) const
      { return &__x; }

      size_type
      max_size() const throw()
      { return size_t(-1) / sizeof(_Tp); }

      // _GLIBCXX_RESOLVE_LIB_DEFECTS-style placement construct/destroy.
      void
      construct(pointer __p, const _Tp& __val)
      { ::new(__p) _Tp(__val); }

      void
      destroy(pointer __p) { __p->~_Tp(); }

      // Defined out of line below; see there for the pool algorithm.
      pointer
      allocate(size_type __n, const void* = 0);

      void
      deallocate(pointer __p, size_type __n);

      /**
       *  @brief  Tunable parameters controlling pool behavior.
       *
       *  May only be changed (via _S_set_options) before the first
       *  allocation initializes the pool.
       */
      struct _Tune
      {
        // Alignment of handed-out blocks; also the size of the
        // per-block header, since the user pointer is offset by
        // _M_align bytes from the start of each _Block_record.
        size_t _M_align;

        // Requests larger than this many bytes bypass the pool and go
        // straight to ::operator new / ::operator delete.
        size_t _M_max_bytes;

        // Size in bytes of the smallest bin; successive bins double
        // from here (see _S_initialize), so this is presumably
        // required to be a power of two -- confirm against callers.
        size_t _M_min_bin;

        // Bytes fetched from ::operator new each time a bin's
        // freelist runs dry.  The default (4096 - 4 * sizeof(void*))
        // leaves headroom for malloc bookkeeping below a 4K page.
        size_t _M_chunk_size;

        // Maximum number of threads supported; sizes the per-thread
        // freelist arrays (index 0 is reserved for the global list).
        size_t _M_max_threads;

        // Percentage used by deallocate(): once a thread's free count
        // times this percentage exceeds its in-use count (plus a
        // per-bin threshold), surplus blocks are returned to the
        // bin's global freelist for other threads to reuse.
        size_t _M_freelist_headroom;

        // When true, bypass the pool entirely and use new/delete.
        bool _M_force_new;

        explicit
        _Tune()
        // Default tuning; _M_force_new honors the GLIBCXX_FORCE_NEW
        // environment variable.
        : _M_align(8), _M_max_bytes(128), _M_min_bin(8),
        _M_chunk_size(4096 - 4 * sizeof(void*)),
        _M_max_threads(4096), _M_freelist_headroom(10),
        _M_force_new(getenv("GLIBCXX_FORCE_NEW") ? true : false)
        { }

        explicit
        _Tune(size_t __align, size_t __maxb, size_t __minbin,
              size_t __chunk, size_t __maxthreads, size_t __headroom,
              bool __force)
        : _M_align(__align), _M_max_bytes(__maxb), _M_min_bin(__minbin),
        _M_chunk_size(__chunk), _M_max_threads(__maxthreads),
        _M_freelist_headroom(__headroom), _M_force_new(__force)
        { }
      };

    private:
      // One-time initialization guard.  _S_once drives __gthread_once
      // when threads are active; _S_init is also tested directly so the
      // single-threaded path (and platforms without once-support) can
      // initialize too.
#ifdef __GTHREADS
      static __gthread_once_t _S_once;
#endif
      static bool _S_init;

      // Builds the binmap, the bin records and (when threaded) the
      // thread-id freelist; runs once before the first pooled request.
      static void
      _S_initialize();

      // Current tuning parameters; see _Tune above.
      static _Tune _S_options;

      static const _Tune
      _S_get_options()
      { return _S_options; }

      // Options are frozen once the pool has been initialized.
      static void
      _S_set_options(_Tune __t)
      {
        if (!_S_init)
          _S_options = __t;
      }

      // Lookup table mapping a request size in bytes to its bin index;
      // _M_max_bytes + 1 entries, filled in by _S_initialize().
      typedef unsigned short int _Binmap_type;
      static _Binmap_type* _S_binmap;

#ifdef __GTHREADS
      // Node in the freelist that hands out small integer ids
      // (1.._M_max_threads) to threads.  A thread's node is stashed in
      // thread-local storage under _S_thread_key and recycled by
      // _S_destroy_thread_key() when the thread exits.
      struct _Thread_record
      {
        // Points to next free thread-id record, NULL at list end.
        _Thread_record* volatile _M_next;

        // The thread id this record grants.
        size_t _M_id;
      };

      static _Thread_record* volatile _S_thread_freelist_first;
      static __gthread_mutex_t _S_thread_freelist_mutex;
      static __gthread_key_t _S_thread_key;

      // TLS destructor: returns a thread's id record to the freelist.
      static void
      _S_destroy_thread_key(void* __freelist_pos);
#endif

      // Returns the calling thread's id (0 when threads are inactive).
      static size_t
      _S_get_thread_id();

      // Block header occupying the first _M_align bytes of each block.
      // While the block is free, _M_next links the freelist; while it
      // is allocated (threaded mode), _M_thread_id records the
      // allocating thread (set in allocate(), read in deallocate()).
      union _Block_record
      {
        // Points to next free block, NULL at list end.
        _Block_record* volatile _M_next;

#ifdef __GTHREADS
        // Id of the thread that allocated this block.
        size_t _M_thread_id;
#endif
      };

      // Per-bin bookkeeping.  The arrays are indexed by thread id;
      // index 0 is the global list shared by all threads (and the only
      // entry used in single-threaded mode).
      struct _Bin_record
      {
        // _M_first[id] is the head of thread id's freelist for this bin.
        _Block_record** volatile _M_first;

#ifdef __GTHREADS
        // Per-thread counts of free and in-use blocks in this bin,
        // used by deallocate()'s headroom heuristic.
        size_t* volatile _M_free;
        size_t* volatile _M_used;

        // Protects this bin's global (index 0) freelist and counters.
        __gthread_mutex_t* _M_mutex;
#endif
      };

      // Array of bin records, one per size class.
      static _Bin_record* volatile _S_bin;

      // Number of bins (size classes); see its definition below for
      // why it starts at 1.
      static size_t _S_bin_size;
    };
00276
  template<typename _Tp>
    typename __mt_alloc<_Tp>::pointer
    __mt_alloc<_Tp>::
    allocate(size_type __n, const void*)
    {
      // Lazily initialize the pool on first use.  With active threads
      // __gthread_once guarantees exactly one run of _S_initialize();
      // the second _S_init test covers the single-threaded case (and
      // platforms where once-support did not fire).
      if (!_S_init)
        {
#ifdef __GTHREADS
          if (__gthread_active_p())
            __gthread_once(&_S_once, _S_initialize);
#endif
          if (!_S_init)
            _S_initialize();
        }

      // Requests larger than _M_max_bytes -- or everything, when
      // GLIBCXX_FORCE_NEW is in effect -- bypass the pool.
      // NOTE(review): __n * sizeof(_Tp) can overflow for huge __n;
      // callers are presumably trusted to stay within max_size().
      const size_t __bytes = __n * sizeof(_Tp);
      if (__bytes > _S_options._M_max_bytes || _S_options._M_force_new)
        {
          void* __ret = ::operator new(__bytes);
          return static_cast<_Tp*>(__ret);
        }

      // Map the request to its bin and locate the calling thread's
      // freelist for that bin.
      const size_t __which = _S_binmap[__bytes];
      const size_t __thread_id = _S_get_thread_id();

      const _Bin_record& __bin = _S_bin[__which];
      _Block_record* __block = NULL;
      if (__bin._M_first[__thread_id] == NULL)
        {
          // This thread's list is empty: either carve a fresh chunk
          // from ::operator new, or (threaded) take blocks from the
          // bin's global list.  Each block is __bin_size bytes: the
          // user payload plus an _M_align-byte _Block_record header.
          const size_t __bin_size = ((_S_options._M_min_bin << __which)
                                     + _S_options._M_align);
          size_t __block_count = _S_options._M_chunk_size / __bin_size;

#ifdef __GTHREADS
          if (__gthread_active_p())
            {
              __gthread_mutex_lock(__bin._M_mutex);
              if (__bin._M_first[0] == NULL)
                {
                  // The global list is empty too.  Drop the lock before
                  // the (possibly slow) ::operator new -- carving into
                  // our own per-thread slot needs no synchronization.
                  __gthread_mutex_unlock(__bin._M_mutex);

                  void* __v = ::operator new(_S_options._M_chunk_size);
                  __bin._M_first[__thread_id] = static_cast<_Block_record*>(__v);
                  __bin._M_free[__thread_id] = __block_count;

                  // Thread the chunk into a NULL-terminated singly
                  // linked list of __block_count blocks.
                  --__block_count;
                  __block = __bin._M_first[__thread_id];
                  while (__block_count-- > 0)
                    {
                      char* __c = reinterpret_cast<char*>(__block) + __bin_size;
                      __block->_M_next = reinterpret_cast<_Block_record*>(__c);
                      __block = __block->_M_next;
                    }
                  __block->_M_next = NULL;
                }
              else
                {
                  // Take blocks from the global list: all of them if it
                  // holds no more than a chunk's worth, otherwise split
                  // off the first __block_count blocks and leave the
                  // remainder (and its count) on the global list.
                  __bin._M_first[__thread_id] = __bin._M_first[0];
                  if (__block_count >= __bin._M_free[0])
                    {
                      __bin._M_free[__thread_id] = __bin._M_free[0];
                      __bin._M_free[0] = 0;
                      __bin._M_first[0] = NULL;
                    }
                  else
                    {
                      __bin._M_free[__thread_id] = __block_count;
                      __bin._M_free[0] -= __block_count;
                      --__block_count;
                      // Walk to the last block we keep, then cut the
                      // list there.
                      __block = __bin._M_first[0];
                      while (__block_count-- > 0)
                        __block = __block->_M_next;
                      __bin._M_first[0] = __block->_M_next;
                      __block->_M_next = NULL;
                    }
                  __gthread_mutex_unlock(__bin._M_mutex);
                }
            }
          else
#endif
            {
              // Single-threaded: carve a fresh chunk into list 0
              // (no counters or locking needed).
              void* __v = ::operator new(_S_options._M_chunk_size);
              __bin._M_first[0] = static_cast<_Block_record*>(__v);

              --__block_count;
              __block = __bin._M_first[0];
              while (__block_count-- > 0)
                {
                  char* __c = reinterpret_cast<char*>(__block) + __bin_size;
                  __block->_M_next = reinterpret_cast<_Block_record*>(__c);
                  __block = __block->_M_next;
                }
              __block->_M_next = NULL;
            }
        }

      // Pop the first block off this thread's list; in threaded mode
      // stamp it with the owner's id and update the accounting that
      // deallocate()'s headroom heuristic relies on.
      __block = __bin._M_first[__thread_id];
      __bin._M_first[__thread_id] = __bin._M_first[__thread_id]->_M_next;
#ifdef __GTHREADS
      if (__gthread_active_p())
        {
          __block->_M_thread_id = __thread_id;
          --__bin._M_free[__thread_id];
          ++__bin._M_used[__thread_id];
        }
#endif

      // The user pointer lies just past the _M_align-byte header.
      char* __c = reinterpret_cast<char*>(__block) + _S_options._M_align;
      return static_cast<_Tp*>(static_cast<void*>(__c));
    }
00414
  template<typename _Tp>
    void
    __mt_alloc<_Tp>::
    deallocate(pointer __p, size_type __n)
    {
      // Mirror allocate()'s fast path: large requests (and everything
      // under GLIBCXX_FORCE_NEW) came straight from ::operator new.
      const size_t __bytes = __n * sizeof(_Tp);
      if (__bytes > _S_options._M_max_bytes || _S_options._M_force_new)
        {
          ::operator delete(__p);
          return;
        }

      // Locate the bin and recover the _Block_record header placed
      // _M_align bytes before the user pointer.
      const size_t __which = _S_binmap[__bytes];
      const _Bin_record& __bin = _S_bin[__which];

      char* __c = reinterpret_cast<char*>(__p) - _S_options._M_align;
      _Block_record* __block = reinterpret_cast<_Block_record*>(__c);

#ifdef __GTHREADS
      if (__gthread_active_p())
        {
          // Headroom heuristic: if this thread's free count, scaled by
          // _M_freelist_headroom (a percentage), exceeds its in-use
          // count by more than a per-bin threshold, give the surplus
          // back to the bin's global freelist for other threads.
          const size_t __thread_id = _S_get_thread_id();

          long __remove = ((__bin._M_free[__thread_id]
                            * _S_options._M_freelist_headroom)
                           - __bin._M_used[__thread_id]);
          if (__remove > static_cast<long>(100 * (_S_bin_size - __which)
                                           * _S_options._M_freelist_headroom)
              && __remove > static_cast<long>(__bin._M_free[__thread_id]))
            {
              // Detach the first __removed blocks from this thread's
              // list (no lock needed -- only this thread touches it)...
              _Block_record* __tmp = __bin._M_first[__thread_id];
              _Block_record* __first = __tmp;
              __remove /= _S_options._M_freelist_headroom;
              const long __removed = __remove;
              --__remove;
              while (__remove-- > 0)
                __tmp = __tmp->_M_next;
              __bin._M_first[__thread_id] = __tmp->_M_next;
              __bin._M_free[__thread_id] -= __removed;

              // ...then splice them onto the global list under the
              // bin mutex.
              __gthread_mutex_lock(__bin._M_mutex);
              __tmp->_M_next = __bin._M_first[0];
              __bin._M_first[0] = __first;
              __bin._M_free[0] += __removed;
              __gthread_mutex_unlock(__bin._M_mutex);
            }

          // Credit the thread that originally allocated this block.
          // NOTE(review): this decrements another thread's counter
          // without synchronization -- looks like a benign-race
          // heuristic, but confirm it is intentional.
          --__bin._M_used[__block->_M_thread_id];

          // The block itself always joins *this* thread's freelist.
          __block->_M_next = __bin._M_first[__thread_id];
          __bin._M_first[__thread_id] = __block;

          ++__bin._M_free[__thread_id];
        }
      else
#endif
        {
          // Single-threaded: just push onto list 0; no counters kept.
          __block->_M_next = __bin._M_first[0];
          __bin._M_first[0] = __block;
        }
    }
00485
  template<typename _Tp>
    void
    __mt_alloc<_Tp>::
    _S_initialize()
    {
      // If an allocation happens during static initialization,
      // _S_options may not have been constructed yet.  An _M_align of
      // zero can never be a valid tuning, so treat it as the
      // "not yet constructed" marker and build the defaults in place.
      if (_S_options._M_align == 0)
        new (&_S_options) _Tune;

      // Under GLIBCXX_FORCE_NEW no pool structures are needed at all:
      // allocate()/deallocate() go straight to new/delete.
      if (_S_options._M_force_new)
        {
          _S_init = true;
          return;
        }

      // Count the bins: sizes double from _M_min_bin up to
      // _M_max_bytes.  (_S_bin_size starts at 1 -- see its definition
      // at the bottom of this file -- so the smallest bin is counted.)
      size_t __bin_size = _S_options._M_min_bin;
      while (_S_options._M_max_bytes > __bin_size)
        {
          __bin_size <<= 1;
          ++_S_bin_size;
        }

      // Build the size->bin lookup table: one entry for every request
      // size from 0 to _M_max_bytes inclusive.
      const size_t __j = (_S_options._M_max_bytes + 1) * sizeof(_Binmap_type);
      _S_binmap = static_cast<_Binmap_type*>(::operator new(__j));

      _Binmap_type* __bp = _S_binmap;
      _Binmap_type __bin_max = _S_options._M_min_bin;
      _Binmap_type __bint = 0;
      for (_Binmap_type __ct = 0; __ct <= _S_options._M_max_bytes; ++__ct)
        {
          // Advance to the next bin each time the size crosses a
          // power-of-two boundary above _M_min_bin.
          if (__ct > __bin_max)
            {
              __bin_max <<= 1;
              ++__bint;
            }
          *__bp++ = __bint;
        }

      // Allocate the per-bin records themselves.
      void* __v = ::operator new(sizeof(_Bin_record) * _S_bin_size);
      _S_bin = static_cast<_Bin_record*>(__v);

#ifdef __GTHREADS
      if (__gthread_active_p())
        {
          // Build the freelist of thread-id records.  Ids run from 1
          // to _M_max_threads; id 0 is reserved for the global lists.
          const size_t __k = sizeof(_Thread_record) * _S_options._M_max_threads;
          __v = ::operator new(__k);
          _S_thread_freelist_first = static_cast<_Thread_record*>(__v);

          size_t __i;
          for (__i = 1; __i < _S_options._M_max_threads; ++__i)
            {
              _Thread_record& __tr = _S_thread_freelist_first[__i - 1];
              __tr._M_next = &_S_thread_freelist_first[__i];
              __tr._M_id = __i;
            }

          // Terminate the list; the last record gets the highest id.
          _S_thread_freelist_first[__i - 1]._M_next = NULL;
          _S_thread_freelist_first[__i - 1]._M_id = __i;

          // Without a static initializer macro the freelist mutex must
          // be initialized at runtime (with one, it is initialized
          // statically at its definition below).
#ifndef __GTHREAD_MUTEX_INIT
          __GTHREAD_MUTEX_INIT_FUNCTION(&_S_thread_freelist_mutex);
#endif

          // Register the TLS destructor that recycles thread ids.
          __gthread_key_create(&_S_thread_key, _S_destroy_thread_key);

          // Per bin: a freelist head plus free/used counters for every
          // possible thread id (0.._M_max_threads), and the bin mutex.
          const size_t __max_threads = _S_options._M_max_threads + 1;
          for (size_t __n = 0; __n < _S_bin_size; ++__n)
            {
              _Bin_record& __bin = _S_bin[__n];
              __v = ::operator new(sizeof(_Block_record*) * __max_threads);
              __bin._M_first = static_cast<_Block_record**>(__v);

              __v = ::operator new(sizeof(size_t) * __max_threads);
              __bin._M_free = static_cast<size_t*>(__v);

              __v = ::operator new(sizeof(size_t) * __max_threads);
              __bin._M_used = static_cast<size_t*>(__v);

              __v = ::operator new(sizeof(__gthread_mutex_t));
              __bin._M_mutex = static_cast<__gthread_mutex_t*>(__v);

#ifdef __GTHREAD_MUTEX_INIT
              {
                // __GTHREAD_MUTEX_INIT is an aggregate initializer, so
                // copy it in via a temporary.
                __gthread_mutex_t __tmp = __GTHREAD_MUTEX_INIT;
                *__bin._M_mutex = __tmp;
              }
#else
              { __GTHREAD_MUTEX_INIT_FUNCTION(__bin._M_mutex); }
#endif

              // Start with every per-thread list empty.
              for (size_t __threadn = 0; __threadn < __max_threads;
                   ++__threadn)
                {
                  __bin._M_first[__threadn] = NULL;
                  __bin._M_free[__threadn] = 0;
                  __bin._M_used[__threadn] = 0;
                }
            }
        }
      else
#endif
        // Single-threaded: each bin needs only the one list head.
        for (size_t __n = 0; __n < _S_bin_size; ++__n)
          {
            _Bin_record& __bin = _S_bin[__n];
            __v = ::operator new(sizeof(_Block_record*));
            __bin._M_first = static_cast<_Block_record**>(__v);
            __bin._M_first[0] = NULL;
          }

      _S_init = true;
    }
00620
00621 template<typename _Tp>
00622 size_t
00623 __mt_alloc<_Tp>::
00624 _S_get_thread_id()
00625 {
00626 #ifdef __GTHREADS
00627
00628
00629
00630
00631 if (__gthread_active_p())
00632 {
00633 _Thread_record* __freelist_pos =
00634 static_cast<_Thread_record*>(__gthread_getspecific(_S_thread_key));
00635 if (__freelist_pos == NULL)
00636 {
00637
00638
00639
00640 __gthread_mutex_lock(&_S_thread_freelist_mutex);
00641 __freelist_pos = _S_thread_freelist_first;
00642 _S_thread_freelist_first = _S_thread_freelist_first->_M_next;
00643 __gthread_mutex_unlock(&_S_thread_freelist_mutex);
00644
00645 __gthread_setspecific(_S_thread_key,
00646 static_cast<void*>(__freelist_pos));
00647 }
00648 return __freelist_pos->_M_id;
00649 }
00650 #endif
00651
00652
00653 return 0;
00654 }
00655
#ifdef __GTHREADS
  // Thread-exit hook registered via __gthread_key_create: returns the
  // departing thread's id record to the shared freelist so a future
  // thread can reuse the id.
  template<typename _Tp>
    void
    __mt_alloc<_Tp>::
    _S_destroy_thread_key(void* __freelist_pos)
    {
      _Thread_record* const __returned =
        static_cast<_Thread_record*>(__freelist_pos);

      __gthread_mutex_lock(&_S_thread_freelist_mutex);
      __returned->_M_next = _S_thread_freelist_first;
      _S_thread_freelist_first = __returned;
      __gthread_mutex_unlock(&_S_thread_freelist_mutex);
    }
#endif
00670
00671 template<typename _Tp>
00672 inline bool
00673 operator==(const __mt_alloc<_Tp>&, const __mt_alloc<_Tp>&)
00674 { return true; }
00675
00676 template<typename _Tp>
00677 inline bool
00678 operator!=(const __mt_alloc<_Tp>&, const __mt_alloc<_Tp>&)
00679 { return false; }
00680
  // Definitions of the static pool state declared in __mt_alloc.

  template<typename _Tp>
    bool __mt_alloc<_Tp>::_S_init = false;

  template<typename _Tp>
    typename __mt_alloc<_Tp>::_Tune __mt_alloc<_Tp>::_S_options;

  template<typename _Tp>
    typename __mt_alloc<_Tp>::_Binmap_type* __mt_alloc<_Tp>::_S_binmap;

  template<typename _Tp>
    typename __mt_alloc<_Tp>::_Bin_record* volatile __mt_alloc<_Tp>::_S_bin;

  // Starts at 1, not 0: _S_initialize() increments it once per size
  // doubling above _M_min_bin, so the smallest bin is pre-counted.
  template<typename _Tp>
    size_t __mt_alloc<_Tp>::_S_bin_size = 1;

  // Thread-support state is only needed when gthreads are available.
#ifdef __GTHREADS
  template<typename _Tp>
    __gthread_once_t __mt_alloc<_Tp>::_S_once = __GTHREAD_ONCE_INIT;

  template<typename _Tp>
    typename __mt_alloc<_Tp>::_Thread_record*
    volatile __mt_alloc<_Tp>::_S_thread_freelist_first = NULL;

  template<typename _Tp>
    __gthread_key_t __mt_alloc<_Tp>::_S_thread_key;

  // Initialize the freelist mutex statically where the platform
  // provides an initializer macro; otherwise _S_initialize() calls
  // __GTHREAD_MUTEX_INIT_FUNCTION on it at runtime.
  template<typename _Tp>
    __gthread_mutex_t
#ifdef __GTHREAD_MUTEX_INIT
    __mt_alloc<_Tp>::_S_thread_freelist_mutex = __GTHREAD_MUTEX_INIT;
#else
    __mt_alloc<_Tp>::_S_thread_freelist_mutex;
#endif
#endif
00716 }
00717
00718 #endif