00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028
00029
00030
00031
00032
00033
00034
00035
#ifndef _MT_ALLOCATOR_H
00036
#define _MT_ALLOCATOR_H 1
00037
00038
#include <new>
00039
#include <cstdlib>
00040
#include <bits/functexcept.h>
00041
#include <bits/gthr.h>
00042
#include <bits/atomicity.h>
00043
00044
namespace __gnu_cxx
00045 {
00046
00047
00048
00049
00050
00051
00052
00053
00054
00055
00056
/**
 *  @brief  Fixed-size, multi-thread optimized allocator.
 *
 *  All bookkeeping state is static, so every instantiation of
 *  __mt_alloc<_Tp> shares the same pools.  Small requests are served
 *  from power-of-two sized "bins" of chained _Block_record nodes;
 *  requests larger than _Tune::_M_max_bytes (or all requests when
 *  GLIBCXX_FORCE_NEW is set) fall through to ::operator new.
 */
template<typename _Tp>
  class __mt_alloc
  {
  public:
    // Standard allocator typedefs.
    typedef size_t     size_type;
    typedef ptrdiff_t  difference_type;
    typedef _Tp*       pointer;
    typedef const _Tp* const_pointer;
    typedef _Tp&       reference;
    typedef const _Tp& const_reference;
    typedef _Tp        value_type;

    template<typename _Tp1>
      struct rebind
      { typedef __mt_alloc<_Tp1> other; };

    // Constructors/destructor are no-ops: all state is static.
    __mt_alloc() throw()
    {
    }

    __mt_alloc(const __mt_alloc&) throw()
    {
    }

    template<typename _Tp1>
      __mt_alloc(const __mt_alloc<_Tp1>& obj) throw()
      {
      }

    ~__mt_alloc() throw() { }

    pointer
    address(reference __x) const
    { return &__x; }

    const_pointer
    address(const_reference __x) const
    { return &__x; }

    size_type
    max_size() const throw()
    { return size_t(-1) / sizeof(_Tp); }

    // Placement-construct a copy of __val at __p.
    void
    construct(pointer __p, const _Tp& __val)
    { ::new(__p) _Tp(__val); }

    void
    destroy(pointer __p) { __p->~_Tp(); }

    // Defined out of line below; hint pointer is ignored.
    pointer
    allocate(size_type __n, const void* = 0);

    void
    deallocate(pointer __p, size_type __n);

    // Tunable parameters controlling pool behavior.  Only honored if
    // changed (via _S_set_options) before the first allocation.
    struct _Tune
    {
      // Requests above this many bytes bypass the pools entirely.
      size_t _M_max_bytes;

      // Size in bytes of the smallest bin; larger bins double in size.
      size_t _M_min_bin;

      // Bytes grabbed from ::operator new each time a bin's freelist
      // runs dry (minus a small amount left for malloc overhead).
      size_t _M_chunk_size;

      // Maximum number of supported threads; each gets its own
      // per-bin freelist slot (slot 0 is the shared/global list).
      size_t _M_max_threads;

      // Percentage used by deallocate() to decide when a thread's
      // freelist is oversized and blocks should be returned to the
      // global list.
      size_t _M_freelist_headroom;

      // When true (set from the GLIBCXX_FORCE_NEW environment
      // variable by default), all requests use ::operator new.
      bool _M_force_new;

      explicit
      _Tune()
      : _M_max_bytes(128), _M_min_bin(8),
      _M_chunk_size(4096 - 4 * sizeof(void*)),
      _M_max_threads(4096), _M_freelist_headroom(10),
      _M_force_new(getenv("GLIBCXX_FORCE_NEW") ? true : false)
      { }

      explicit
      _Tune(size_t __maxb, size_t __minbin, size_t __chunk,
            size_t __maxthreads, size_t __headroom, bool __force)
      : _M_max_bytes(__maxb), _M_min_bin(__minbin), _M_chunk_size(__chunk),
      _M_max_threads(__maxthreads), _M_freelist_headroom(__headroom),
      _M_force_new(__force)
      { }
    };

  private:
    // One-time initialization guard (threaded builds use __gthread_once;
    // _S_init is the fallback flag and the "already initialized" marker).
#ifdef __GTHREADS
    static __gthread_once_t _S_once;
#endif
    static bool _S_init;

    static void
    _S_initialize();

    // Active tuning parameters shared by all instantiations.
    static _Tune _S_options;

    // NOTE: returns a copy, not a reference.
    static const _Tune
    _S_get_options()
    { return _S_options; }

    // Options can only be changed before the pools are initialized.
    static void
    _S_set_options(_Tune __t)
    {
      if (!_S_init)
        _S_options = __t;
    }

    // Lookup table mapping a request size in bytes to its bin index;
    // filled in by _S_initialize().
    typedef unsigned short int _Binmap_type;
    static _Binmap_type* _S_binmap;

#ifdef __GTHREADS
    // Node in the freelist of reusable thread ids (1-based; id 0 is
    // reserved for the shared/global freelist slot).
    struct _Thread_record
    {
      _Thread_record* volatile _M_next;

      size_t _M_id;
    };

    static _Thread_record* volatile _S_thread_freelist_first;
    static __gthread_mutex_t _S_thread_freelist_mutex;
    static __gthread_key_t _S_thread_key;

    // TLS destructor: returns a thread's id record to the freelist.
    static void
    _S_destroy_thread_key(void* __freelist_pos);
#endif

    static size_t
    _S_get_thread_id();

    // A free block stores the freelist link; an allocated block (in
    // threaded mode) stores the owning thread's id instead.
    union _Block_record
    {
      _Block_record* volatile _M_next;

#ifdef __GTHREADS
      size_t _M_thread_id;
#endif
    };

    // Per-bin state.  Index 0 of each per-thread array is the shared
    // list protected by _M_mutex; indices 1.._M_max_threads are
    // per-thread lists touched only by their owning thread.
    struct _Bin_record
    {
      _Block_record** volatile _M_first;

#ifdef __GTHREADS
      // Free/used block counters per thread, driving the headroom
      // heuristic in deallocate().
      size_t* volatile _M_free;
      size_t* volatile _M_used;

      // Guards the shared (index 0) list of this bin.
      __gthread_mutex_t* _M_mutex;
#endif
    };

    // Array of _S_bin_size bin records.
    static _Bin_record* volatile _S_bin;

    // Number of bins; starts at 1 and grows in _S_initialize().
    static size_t _S_bin_size;
  };
00269
00270
/// Allocate storage for __n objects of _Tp from the pooled bins, or
/// straight from ::operator new for large / forced requests.
template<typename _Tp>
  typename __mt_alloc<_Tp>::pointer
  __mt_alloc<_Tp>::
  allocate(size_type __n, const void*)
  {
    // Lazy one-time setup.  In threaded builds __gthread_once makes the
    // initialization race-free; the second !_S_init check covers the
    // single-threaded (or threads-not-yet-active) case.
    if (!_S_init)
      {
#ifdef __GTHREADS
        if (__gthread_active_p())
          __gthread_once(&_S_once, _S_initialize);
#endif
        if (!_S_init)
          _S_initialize();
      }

    // Oversized request, or pooling disabled: hand off to operator new.
    const size_t __bytes = __n * sizeof(_Tp);
    if (__bytes > _S_options._M_max_bytes || _S_options._M_force_new)
      {
        void* __ret = ::operator new(__bytes);
        return static_cast<_Tp*>(__ret);
      }

    // Map the byte count to its bin and find this thread's freelist.
    const size_t __which = _S_binmap[__bytes];
    const size_t __thread_id = _S_get_thread_id();

    const _Bin_record& __bin = _S_bin[__which];
    _Block_record* __block = NULL;
    if (__bin._M_first[__thread_id] == NULL)
      {
        // This thread's list is empty: carve a fresh chunk, or steal
        // from the shared (index 0) list.  Each block carries a
        // _Block_record header in front of the user data.
        const size_t __bin_size = ((_S_options._M_min_bin << __which)
                                   + sizeof(_Block_record));
        size_t __block_count = _S_options._M_chunk_size / __bin_size;

#ifdef __GTHREADS
        if (__gthread_active_p())
          {
            __gthread_mutex_lock(__bin._M_mutex);
            if (__bin._M_first[0] == NULL)
              {
                // Nothing in the shared list either.  The mutex is
                // released before allocating: from here on only this
                // thread's own slot (__thread_id) is written.
                __gthread_mutex_unlock(__bin._M_mutex);

                void* __v = ::operator new(_S_options._M_chunk_size);
                __bin._M_first[__thread_id] = static_cast<_Block_record*>(__v);
                __bin._M_free[__thread_id] = __block_count;

                // Thread the chunk into a singly linked freelist of
                // __bin_size-spaced blocks.
                --__block_count;
                __block = __bin._M_first[__thread_id];
                while (__block_count-- > 0)
                  {
                    char* __c = reinterpret_cast<char*>(__block) + __bin_size;
                    __block->_M_next = reinterpret_cast<_Block_record*>(__c);
                    __block = __block->_M_next;
                  }
                __block->_M_next = NULL;
              }
            else
              {
                // Shared list is non-empty: take either the whole list
                // (if it is no bigger than a chunk's worth) or the
                // first __block_count blocks of it.
                __bin._M_first[__thread_id] = __bin._M_first[0];
                if (__block_count >= __bin._M_free[0])
                  {
                    __bin._M_free[__thread_id] = __bin._M_free[0];
                    __bin._M_free[0] = 0;
                    __bin._M_first[0] = NULL;
                  }
                else
                  {
                    __bin._M_free[__thread_id] = __block_count;
                    __bin._M_free[0] -= __block_count;
                    --__block_count;
                    __block = __bin._M_first[0];
                    // Walk to the last stolen block and split the list.
                    while (__block_count-- > 0)
                      __block = __block->_M_next;
                    __bin._M_first[0] = __block->_M_next;
                    __block->_M_next = NULL;
                  }
                __gthread_mutex_unlock(__bin._M_mutex);
              }
          }
        else
#endif
          {
            // Single-threaded: everything lives in slot 0, no locking.
            void* __v = ::operator new(_S_options._M_chunk_size);
            __bin._M_first[0] = static_cast<_Block_record*>(__v);

            --__block_count;
            __block = __bin._M_first[0];
            while (__block_count-- > 0)
              {
                char* __c = reinterpret_cast<char*>(__block) + __bin_size;
                __block->_M_next = reinterpret_cast<_Block_record*>(__c);
                __block = __block->_M_next;
              }
            __block->_M_next = NULL;
          }
      }

    // Pop the head block off this thread's freelist.
    __block = __bin._M_first[__thread_id];
    __bin._M_first[__thread_id] = __bin._M_first[__thread_id]->_M_next;
#ifdef __GTHREADS
    if (__gthread_active_p())
      {
        // Stamp the block with its owner and update the counters used
        // by deallocate()'s headroom heuristic.
        __block->_M_thread_id = __thread_id;
        --__bin._M_free[__thread_id];
        ++__bin._M_used[__thread_id];
      }
#endif

    // User data starts just past the block header.
    char* __c = reinterpret_cast<char*>(__block) + sizeof(_Block_record);
    return static_cast<_Tp*>(static_cast<void*>(__c));
  }
00405
00406
/// Return storage for __n objects of _Tp to the pool (or to
/// ::operator delete for large / forced requests).
template<typename _Tp>
  void
  __mt_alloc<_Tp>::
  deallocate(pointer __p, size_type __n)
  {
    // Requests this size never came from the pools (see allocate()).
    const size_t __bytes = __n * sizeof(_Tp);
    if (__bytes > _S_options._M_max_bytes || _S_options._M_force_new)
      {
        ::operator delete(__p);
        return;
      }

    // Locate the bin this block belongs to.
    const size_t __which = _S_binmap[__bytes];
    const _Bin_record& __bin = _S_bin[__which];

    // Recover the block header that sits in front of the user data.
    char* __c = reinterpret_cast<char*>(__p) - sizeof(_Block_record);
    _Block_record* __block = reinterpret_cast<_Block_record*>(__c);

#ifdef __GTHREADS
    if (__gthread_active_p())
      {
        const size_t __thread_id = _S_get_thread_id();

        // Headroom heuristic: if this thread's free count exceeds its
        // used count by more than _M_freelist_headroom percent (and by
        // an absolute threshold scaled by bin size), hand a batch of
        // blocks back to the shared (index 0) list.
        long __remove = ((__bin._M_free[__thread_id]
                          * _S_options._M_freelist_headroom)
                         - __bin._M_used[__thread_id]);
        if (__remove > static_cast<long>(100 * (_S_bin_size - __which)
                                         * _S_options._M_freelist_headroom)
            && __remove > static_cast<long>(__bin._M_free[__thread_id]))
          {
            _Block_record* __tmp = __bin._M_first[__thread_id];
            _Block_record* __first = __tmp;
            __remove /= _S_options._M_freelist_headroom;
            const long __removed = __remove;
            // Walk to the last block of the batch being donated.
            --__remove;
            while (__remove-- > 0)
              __tmp = __tmp->_M_next;
            __bin._M_first[__thread_id] = __tmp->_M_next;
            __bin._M_free[__thread_id] -= __removed;

            // Splice the donated batch onto the shared list under lock.
            __gthread_mutex_lock(__bin._M_mutex);
            __tmp->_M_next = __bin._M_first[0];
            __bin._M_first[0] = __first;
            __bin._M_free[0] += __removed;
            __gthread_mutex_unlock(__bin._M_mutex);
          }

        // Credit the block's original owner (which may be a different
        // thread).  NOTE(review): this decrement is not performed under
        // __bin._M_mutex and _M_used is a plain size_t array, so a
        // cross-thread free looks like an unsynchronized read-modify-
        // write — confirm against later upstream revisions that moved
        // these counters to atomic operations.
        --__bin._M_used[__block->_M_thread_id];

        // The freed block itself always goes on the *caller's* list.
        __block->_M_next = __bin._M_first[__thread_id];
        __bin._M_first[__thread_id] = __block;

        ++__bin._M_free[__thread_id];
      }
    else
#endif
      {
        // Single-threaded: push onto the one shared list, no counters.
        __block->_M_next = __bin._M_first[0];
        __bin._M_first[0] = __block;
      }
  }
00476
00477
/// One-time setup of all static pool state: bin count, size->bin map,
/// bin records, and (in threaded builds) the thread-id freelist, TLS
/// key, counters and per-bin mutexes.  Runs before the first pooled
/// allocation; allocate() guards the call.
template<typename _Tp>
  void
  __mt_alloc<_Tp>::
  _S_initialize()
  {
    // With force_new active the pools are never consulted, so none of
    // the structures below are needed.  NOTE: _S_init is deliberately
    // left false in this case.
    if (_S_options._M_force_new)
      return;

    // Count the bins: doubling from _M_min_bin until _M_max_bytes is
    // covered (_S_bin_size starts at 1).
    size_t __bin_size = _S_options._M_min_bin;
    while (_S_options._M_max_bytes > __bin_size)
      {
        __bin_size <<= 1;
        ++_S_bin_size;
      }

    // Build the byte-count -> bin-index lookup table, one entry per
    // possible request size 0.._M_max_bytes.
    const size_t __j = (_S_options._M_max_bytes + 1) * sizeof(_Binmap_type);
    _S_binmap = static_cast<_Binmap_type*>(::operator new(__j));

    _Binmap_type* __bp = _S_binmap;
    _Binmap_type __bin_max = _S_options._M_min_bin;
    _Binmap_type __bint = 0;
    for (_Binmap_type __ct = 0; __ct <= _S_options._M_max_bytes; ++__ct)
      {
        // Advance to the next (doubled) bin once __ct outgrows this one.
        if (__ct > __bin_max)
          {
            __bin_max <<= 1;
            ++__bint;
          }
        *__bp++ = __bint;
      }

    // Allocate the per-bin records.
    void* __v = ::operator new(sizeof(_Bin_record) * _S_bin_size);
    _S_bin = static_cast<_Bin_record*>(__v);

#ifdef __GTHREADS
    if (__gthread_active_p())
      {
        // Pre-build the freelist of _M_max_threads reusable thread-id
        // records, ids 1.._M_max_threads (0 is the shared slot).
        const size_t __k = sizeof(_Thread_record) * _S_options._M_max_threads;
        __v = ::operator new(__k);
        _S_thread_freelist_first = static_cast<_Thread_record*>(__v);

        size_t __i;
        for (__i = 1; __i < _S_options._M_max_threads; ++__i)
          {
            _Thread_record& __tr = _S_thread_freelist_first[__i - 1];
            __tr._M_next = &_S_thread_freelist_first[__i];
            __tr._M_id = __i;
          }

        // Terminate the list; the last record gets id _M_max_threads.
        _S_thread_freelist_first[__i - 1]._M_next = NULL;
        _S_thread_freelist_first[__i - 1]._M_id = __i;

        // Runtime mutex init only where no static initializer exists.
#ifndef __GTHREAD_MUTEX_INIT
        __GTHREAD_MUTEX_INIT_FUNCTION(&_S_thread_freelist_mutex);
#endif

        // TLS key whose destructor recycles a thread's id on exit.
        __gthread_key_create(&_S_thread_key, _S_destroy_thread_key);

        // Each bin gets _M_max_threads + 1 slots: index 0 is shared.
        const size_t __max_threads = _S_options._M_max_threads + 1;
        for (size_t __n = 0; __n < _S_bin_size; ++__n)
          {
            _Bin_record& __bin = _S_bin[__n];
            __v = ::operator new(sizeof(_Block_record*) * __max_threads);
            __bin._M_first = static_cast<_Block_record**>(__v);

            __v = ::operator new(sizeof(size_t) * __max_threads);
            __bin._M_free = static_cast<size_t*>(__v);

            __v = ::operator new(sizeof(size_t) * __max_threads);
            __bin._M_used = static_cast<size_t*>(__v);

            __v = ::operator new(sizeof(__gthread_mutex_t));
            __bin._M_mutex = static_cast<__gthread_mutex_t*>(__v);

            // Initialize the bin mutex, statically if possible.
#ifdef __GTHREAD_MUTEX_INIT
            {
              // Do not copy a POSIX/gthr mutex once in use; fresh here.
              __gthread_mutex_t __tmp = __GTHREAD_MUTEX_INIT;
              *__bin._M_mutex = __tmp;
            }
#else
            { __GTHREAD_MUTEX_INIT_FUNCTION(__bin._M_mutex); }
#endif

            // All freelists start empty with zeroed counters.
            for (size_t __threadn = 0; __threadn < __max_threads;
                 ++__threadn)
              {
                __bin._M_first[__threadn] = NULL;
                __bin._M_free[__threadn] = 0;
                __bin._M_used[__threadn] = 0;
              }
          }
      }
    else
#endif
      // Single-threaded: only the shared slot 0 exists, no counters,
      // no mutexes.
      for (size_t __n = 0; __n < _S_bin_size; ++__n)
        {
          _Bin_record& __bin = _S_bin[__n];
          __v = ::operator new(sizeof(_Block_record*));
          __bin._M_first = static_cast<_Block_record**>(__v);
          __bin._M_first[0] = NULL;
        }

    _S_init = true;
  }
00594
00595
/// Return the calling thread's pool id (1.._M_max_threads), assigning
/// one from the freelist on first use; returns 0 (the shared slot)
/// when threads are not active or the build is single-threaded.
template<typename _Tp>
  size_t
  __mt_alloc<_Tp>::
  _S_get_thread_id()
  {
#ifdef __GTHREADS
    if (__gthread_active_p())
      {
        // Fast path: id record already cached in thread-local storage.
        _Thread_record* __freelist_pos =
          static_cast<_Thread_record*>(__gthread_getspecific(_S_thread_key));
        if (__freelist_pos == NULL)
          {
            // First call from this thread: pop an id record off the
            // shared freelist under lock and cache it in TLS.
            // NOTE(review): no check for an empty freelist — assumes
            // at most _M_max_threads concurrent threads; confirm.
            __gthread_mutex_lock(&_S_thread_freelist_mutex);
            __freelist_pos = _S_thread_freelist_first;
            _S_thread_freelist_first = _S_thread_freelist_first->_M_next;
            __gthread_mutex_unlock(&_S_thread_freelist_mutex);

            __gthread_setspecific(_S_thread_key,
                                  static_cast<void*>(__freelist_pos));
          }
        return __freelist_pos->_M_id;
      }
#endif
    // Single-threaded operation uses the shared slot.
    return 0;
  }
00629
00630
#ifdef __GTHREADS
00631
template<
typename _Tp>
00632
void
00633 __mt_alloc<_Tp>::
00634 _S_destroy_thread_key(
void* __freelist_pos)
00635 {
00636
00637 __gthread_mutex_lock(&_S_thread_freelist_mutex);
00638 _Thread_record* __tr = static_cast<_Thread_record*>(__freelist_pos);
00639 __tr->_M_next = _S_thread_freelist_first;
00640 _S_thread_freelist_first = __tr;
00641 __gthread_mutex_unlock(&_S_thread_freelist_mutex);
00642 }
00643
#endif
00644
00645
template<
typename _Tp>
00646
inline bool
00647 operator==(
const __mt_alloc<_Tp>&,
const __mt_alloc<_Tp>&)
00648 {
return true; }
00649
00650
template<
typename _Tp>
00651
inline bool
00652 operator!=(
const __mt_alloc<_Tp>&,
const __mt_alloc<_Tp>&)
00653 {
return false; }
00654
00655
// Out-of-line definitions of the static data members.  Being template
// statics, one set of these exists per instantiation of __mt_alloc.

// False until _S_initialize() completes (and stays false when
// _M_force_new short-circuits initialization).
template<typename _Tp>
  bool __mt_alloc<_Tp>::_S_init = false;

// Default-constructed tuning parameters (see _Tune()).
template<typename _Tp>
  typename __mt_alloc<_Tp>::_Tune __mt_alloc<_Tp>::_S_options;

// Size-to-bin lookup table; built in _S_initialize().
template<typename _Tp>
  typename __mt_alloc<_Tp>::_Binmap_type* __mt_alloc<_Tp>::_S_binmap;

// Array of bin records; built in _S_initialize().
template<typename _Tp>
  typename __mt_alloc<_Tp>::_Bin_record* volatile __mt_alloc<_Tp>::_S_bin;

// Bin count; the doubling loop in _S_initialize() starts from 1.
template<typename _Tp>
  size_t __mt_alloc<_Tp>::_S_bin_size = 1;

// Threaded-build state: once-control, thread-id freelist and its
// mutex, and the TLS key.
#ifdef __GTHREADS
template<typename _Tp>
  __gthread_once_t __mt_alloc<_Tp>::_S_once = __GTHREAD_ONCE_INIT;

template<typename _Tp>
  typename __mt_alloc<_Tp>::_Thread_record*
  volatile __mt_alloc<_Tp>::_S_thread_freelist_first = NULL;

template<typename _Tp>
  __gthread_key_t __mt_alloc<_Tp>::_S_thread_key;

// Use the static initializer when the gthreads port provides one;
// otherwise _S_initialize() calls __GTHREAD_MUTEX_INIT_FUNCTION.
template<typename _Tp>
  __gthread_mutex_t
#ifdef __GTHREAD_MUTEX_INIT
  __mt_alloc<_Tp>::_S_thread_freelist_mutex = __GTHREAD_MUTEX_INIT;
#else
  __mt_alloc<_Tp>::_S_thread_freelist_mutex;
#endif
#endif
00690
}
00691
00692
#endif