27#if defined (__GNUC__) && (defined (__CYGWIN__) || defined (__MINGW32__))
31#if defined (_STLP_PTHREADS) && !defined (_STLP_NO_THREADS)
32# include <pthread_alloc>
40#if defined (__WATCOMC__)
46#if defined (_STLP_SGI_THREADS)
52 extern int __us_rsthread_malloc;
58#if defined (_STLP_MSVC) && (_STLP_MSVC >= 1020 && defined (_STLP_DEBUG_ALLOC)) && !defined (_STLP_WCE)
66# ifdef _STLP_NODE_ALLOC_USE_MALLOC
70 void *__chunk = _STLP_VENDOR_CSTD::malloc(__bytes);
79{
return __STATIC_CAST(
char*, _STLP_STD::__stl_new(__bytes)); }
91#if defined (__GNUC__) && defined (__i386__)
// Atomic fetch-and-add for x86 via the LOCK XADD instruction.
// NOTE(review): fragmentary view — the __asm__ __volatile__ keyword, the
// output-operand list and the final return are not visible in this chunk.
92inline long _STLP_atomic_add_gcc_x86(
long volatile*
p,
long addend) {
95  (
// xaddl atomically exchanges-and-adds %1 into memory operand %0.
"lock; xaddl %1, %0;"
97  :
// Input operands: the memory word being incremented, and the addend
// tied ("1") to the same register as the (unseen) output operand.
"m" (*
p),
"1" (addend)
// Expose the primitive under the generic name used by the freelist code.
101# define _STLP_ATOMIC_ADD(__dst, __val) _STLP_atomic_add_gcc_x86(__dst, __val)
102#elif defined (_STLP_WIN32THREADS)
104# if !defined (_STLP_WIN95_LIKE)
105# if defined (_STLP_NEW_PLATFORM_SDK)
106# define _STLP_ATOMIC_ADD(__dst, __val) InterlockedExchangeAdd(__dst, __val)
108# define _STLP_ATOMIC_ADD(__dst, __val) InterlockedExchangeAdd(__CONST_CAST(__add_atomic_t*, __dst), __val)
113#if defined (__OS400__)
// Map an (already aligned, non-zero) request size to its free-list slot:
// (bytes - 1) >> log2(_ALIGN), i.e. slot 0 serves sizes 1.._ALIGN.
120#define _S_FREELIST_INDEX(__bytes) ((__bytes - size_t(1)) >> (int)_ALIGN_SHIFT)
134 if ( 0 == __result ) {
144 if ( 0 == __my_malloc_handler) {
147 (*__my_malloc_handler)();
180#define _STLP_NFREELISTS 16
182#if defined (_STLP_LEAKS_PEDANTIC) && defined (_STLP_USE_DYNAMIC_LIB)
190# define _STLP_DO_CLEAN_NODE_ALLOC
198#if defined (_STLP_THREADS) && \
199 defined (_STLP_HAS_ATOMIC_FREELIST) && defined (_STLP_ATOMIC_ADD)
204# define _STLP_USE_LOCK_FREE_IMPLEMENTATION
207#if !defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION)
208# if defined (_STLP_THREADS)
217# if defined (_STLP_SGI_THREADS)
218 if (__us_rsthread_malloc)
220 _S_Mutex()._M_acquire_lock();
224# if defined (_STLP_SGI_THREADS)
225 if (__us_rsthread_malloc)
227 _S_Mutex()._M_release_lock();
248 {
return (((__bytes) + (
size_t)
_ALIGN-1) & ~((
size_t)
_ALIGN - 1)); }
250#if defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION)
251 typedef _STLP_atomic_freelist::item
_Obj;
257 struct _FreeBlockHeader :
public _STLP_atomic_freelist::item {
275#if defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION)
281#if defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION)
283 static _STLP_atomic_freelist _S_free_mem_blocks;
291#if defined (_STLP_DO_CLEAN_NODE_ALLOC)
294# if defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION)
299 static _AllocCounter&
_STLP_CALL _S_alloc_counter();
300 static void _S_alloc_call();
301 static void _S_dealloc_call();
305 static void _S_chunk_dealloc();
317#if !defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION)
328 if ( (__r = *__my_free_list) != 0 ) {
329 *__my_free_list = __r->
_M_next;
333# if defined (_STLP_DO_CLEAN_NODE_ALLOC)
346 __pobj->
_M_next = *__my_free_list;
347 *__my_free_list = __pobj;
349# if defined (_STLP_DO_CLEAN_NODE_ALLOC)
355# if defined (_STLP_DO_CLEAN_NODE_ALLOC)
356# define _STLP_OFFSET sizeof(_Obj)
358# define _STLP_OFFSET 0
367 size_t __total_bytes = _p_size * __nobjs;
370 if (__bytes_left > 0) {
371 if (__bytes_left >= __total_bytes) {
377 if (__bytes_left >= _p_size) {
378 __nobjs = (
int)(__bytes_left / _p_size);
379 __total_bytes = _p_size * __nobjs;
397#if defined (_STLP_USE_EXCEPTIONS)
398 catch (
const _STLP_STD::bad_alloc&) {
406 __p = *__my_free_list;
408 *__my_free_list = __p -> _M_next;
422# if defined (_STLP_DO_CLEAN_NODE_ALLOC)
448 for (--__nobjs; --__nobjs; ) {
449 __current_obj = __next_obj;
451 __current_obj->
_M_next = __next_obj;
457# if defined (_STLP_DO_CLEAN_NODE_ALLOC)
458void __node_alloc_impl::_S_alloc_call()
459{ ++_S_alloc_counter(); }
// Leak-audit hook for deallocation.
// NOTE(review): fragmentary view — the counter decrement / zero test that
// guards the branch below is not visible in this chunk.
461void __node_alloc_impl::_S_dealloc_call() {
// When the last tracked allocation has been released, return all cached
// chunks to the system.
464    { _S_chunk_dealloc(); }
// Walk the chain of raw memory chunks and release each one.
// NOTE(review): fragmentary view — the loop body (saving _M_next into
// __pnext and freeing __pcur) is not visible in this chunk.
468void __node_alloc_impl::_S_chunk_dealloc() {
469  _Obj *__pcur = _S_chunks, *__pnext;
470  while (__pcur != 0) {
490# if defined (_STLP_DO_CLEAN_NODE_ALLOC)
499# if defined (_STLP_DO_CLEAN_NODE_ALLOC)
516 _Obj* __cur_item = __result;
518 for (--__nobjs; __nobjs != 0; --__nobjs) {
520 __my_freelist->push(__cur_item);
525# if defined (_STLP_DO_CLEAN_NODE_ALLOC)
526# define _STLP_OFFSET _ALIGN
528# define _STLP_OFFSET 0
535# if defined (_STLP_DO_CLEAN_NODE_ALLOC)
544 _FreeBlockHeader* __block =
__STATIC_CAST(_FreeBlockHeader*, _S_free_mem_blocks.pop());
553 __result = __buf_start;
554 __nobjs = (
int)(__bytes_left/_p_size);
556 __bytes_left -= __total_bytes;
557 __buf_start += __total_bytes;
559 else if (__bytes_left >= __total_bytes) {
561 __result = __buf_start;
562 __bytes_left -= __total_bytes;
563 __buf_start += __total_bytes;
566 if (__bytes_left != 0) {
568 if ((__result != 0) && (__bytes_left >= (
__add_atomic_t)
sizeof(_FreeBlockHeader))) {
572 __newblock->_M_end = __block->_M_end;
573 _S_free_mem_blocks.push(__newblock);
579 if (__rounded_down > 0)
595#if defined (_STLP_USE_EXCEPTIONS)
596 catch (
const bad_alloc&) {
601 if (__i <
sizeof(_FreeBlockHeader)) {
604 __nobjs = (
int)(__i/_p_size);
606 size_t __bytes_left = __i - __total_bytes;
608 if (__rounded_down > 0) {
615 _FreeBlockHeader* __newblock = (_FreeBlockHeader*)__p;
617 _S_free_mem_blocks.push(__newblock);
633 _StlMsg_DBA_DELETED_TWICE)
636# if defined (_STLP_DO_CLEAN_NODE_ALLOC)
643 if (__bytes_to_get > __total_bytes) {
645 _FreeBlockHeader* __freeblock =
__REINTERPRET_CAST(_FreeBlockHeader*, __result + __total_bytes);
646 __freeblock->_M_end = __result + __bytes_to_get;
647 _S_free_mem_blocks.push(__freeblock);
652# if defined (_STLP_DO_CLEAN_NODE_ALLOC)
653void __node_alloc_impl::_S_alloc_call()
656void __node_alloc_impl::_S_dealloc_call() {
663void __node_alloc_impl::_S_chunk_dealloc() {
673 _S_free_mem_blocks.clear();
681 _Obj* __chunk = _S_chunks.clear();
682 while (__chunk != 0) {
692#if defined (_STLP_DO_CLEAN_NODE_ALLOC)
// RAII janitor: a static instance of this struct (see _S_alloc_counter)
// triggers the final deallocation bookkeeping at program shutdown.
693struct __node_alloc_cleaner {
694  ~__node_alloc_cleaner()
695  { __node_alloc_impl::_S_dealloc_call(); }
698#  if defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION)
// Meyers-style accessor for the allocation counter.
// NOTE(review): the function's return-type line is outside this view.
703__node_alloc_impl::_S_alloc_counter() {
// Starts at 1 so the counter only reaches 0 once the cleaner below has run.
704  static _AllocCounter _S_counter = 1;
// Local static whose destructor runs _S_dealloc_call at exit.
705  static __node_alloc_cleaner _S_node_alloc_cleaner;
// Out-of-class definitions of __node_alloc_impl's static data, selected by
// the lock-free-vs-mutex configuration chosen earlier in the file.
710#if !defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION)
// 16 empty free lists (one per _ALIGN-sized size class).
713= {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
719_STLP_atomic_freelist __node_alloc_impl::_S_free_mem_blocks;
722#if !defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION)
727#if defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION)
734#if defined (_STLP_DO_CLEAN_NODE_ALLOC)
735#  if defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION)
// Chain of raw chunks kept only so _S_chunk_dealloc can free them.
736_STLP_atomic_freelist __node_alloc_impl::_S_chunks;
748#if defined (_STLP_PTHREADS) && !defined (_STLP_NO_THREADS)
750# define _STLP_DATA_ALIGNMENT 8
// Free-list node for the per-thread allocator: while a block is free it
// stores the link; once handed out the same bytes hold client data.
756union _Pthread_alloc_obj {
757  union _Pthread_alloc_obj * __free_list_link;
// Forces each node to be at least _STLP_DATA_ALIGNMENT bytes.
758  char __client_data[_STLP_DATA_ALIGNMENT];
// Per-thread allocator state: one free list per size class plus a link so
// retired states can be chained for reuse (see _S_free_per_thread_states).
765struct _Pthread_alloc_per_thread_state {
766  typedef _Pthread_alloc_obj __obj;
767  enum { _S_NFREELISTS =
_MAX_BYTES / _STLP_DATA_ALIGNMENT };
// Fresh state: all free lists empty, not on the reuse chain.
774  _Pthread_alloc_per_thread_state() : __next(0)
775  {
// __free_list is volatile-qualified; the cast strips that for memset.
memset((
void *)
__CONST_CAST(_Pthread_alloc_obj**, __free_list), 0, (
size_t)_S_NFREELISTS *
sizeof(__obj *)); }
// Replenish the free list for size __n from the shared chunk pool and
// return one object.
777  void *_M_refill(
size_t __n);
// One singly-linked free list per _STLP_DATA_ALIGNMENT-sized class.
779  _Pthread_alloc_obj*
volatile __free_list[_S_NFREELISTS];
// Next retired state on the reuse chain (0 when live/end of chain).
780  _Pthread_alloc_per_thread_state *__next;
// Shared (cross-thread) half of the pthread allocator: owns the raw heap
// window [_S_start_free, _S_end_free) and hands chunks to thread states.
786class _Pthread_alloc_impl {
788  typedef _Pthread_alloc_per_thread_state __state_type;
789  typedef char value_type;
// Carve __nobjs objects of __size bytes out of the shared pool; may reduce
// __nobjs if less memory is available (reference parameter).
793  static char *_S_chunk_alloc(
size_t __size,
size_t &__nobjs, __state_type*);
// All requests are rounded to multiples of this alignment.
795  enum {_S_ALIGN = _STLP_DATA_ALIGNMENT};
797 static size_t _S_round_up(
size_t __bytes)
798 {
return (((__bytes) + (
int)_S_ALIGN - 1) & ~((
int)_S_ALIGN - 1)); }
799 static size_t _S_freelist_index(
size_t __bytes)
800 {
return (((__bytes) + (
int)_S_ALIGN - 1) / (
int)_S_ALIGN - 1); }
// Current unassigned window of the most recent system allocation.
806  static char *_S_start_free;
807  static char *_S_end_free;
// Total bytes obtained from the system; used to grow request sizes.
808  static size_t _S_heap_size;
// Chain of per-thread states retired by exiting threads, kept for reuse.
809  static __state_type *_S_free_per_thread_states;
// TLS key mapping each thread to its __state_type.
810  static pthread_key_t _S_key;
811  static bool _S_key_initialized;
// pthread TLS destructor: parks the exiting thread's state for reuse.
814  static void _S_destructor(
void *
instance);
817  static __state_type *_S_new_per_thread_state();
// Returns (creating on first use) the calling thread's state.
820  static __state_type *_S_get_per_thread_state();
825  friend class _M_lock;
// Scoped guard over the shared chunk-allocator mutex (RAII).
828    _M_lock () { _S_chunk_allocator_lock._M_acquire_lock(); }
829    ~_M_lock () { _S_chunk_allocator_lock._M_release_lock(); }
// Allocate __n bytes using the calling thread's state. __n is by reference:
// it is rounded up in place so the caller learns the real block size.
835  static void * allocate(
size_t&
__n);
// Return __p (of size __n) to the calling thread's free list.
838  static void deallocate(
void *__p,
size_t __n);
// Variants taking an explicit per-thread state (per-instance allocators).
842  static void * allocate(
size_t&
__n, __state_type* __a);
845  static void deallocate(
void *__p,
size_t __n, __state_type* __a);
// realloc-style resize; __new_sz is rounded up in place like allocate.
847  static void * reallocate(
void *__p,
size_t __old_sz,
size_t& __new_sz);
// Refill this thread's free list for size __n: grab up to 128 objects from
// the shared pool, return the first, and thread the rest onto the list.
// NOTE(review): fragmentary view — the single-object early return, the loop
// break and the final return are not visible in this chunk.
853void *_Pthread_alloc_per_thread_state::_M_refill(
size_t __n) {
854  typedef _Pthread_alloc_obj __obj;
// Ask for 128 objects; _S_chunk_alloc may lower __nobjs (by reference).
855  size_t __nobjs = 128;
856  char * __chunk = _Pthread_alloc_impl::_S_chunk_alloc(
__n, __nobjs,
this);
857  __obj *
volatile * __my_free_list;
859  __obj * __current_obj, * __next_obj;
866  __my_free_list = __free_list + _Pthread_alloc_impl::_S_freelist_index(
__n);
// First object goes to the caller; the rest become the new free list.
869  __result = (__obj *)__chunk;
870  *__my_free_list = __next_obj = (__obj *)(__chunk +
__n);
// Link consecutive __n-byte slices of the chunk into a singly-linked list.
871  for (__i = 1; ; ++__i) {
872    __current_obj = __next_obj;
873    __next_obj = (__obj *)((
char *)__next_obj +
__n);
// Last object terminates the list.
874    if (__nobjs - 1 == __i) {
875      __current_obj -> __free_list_link = 0;
878      __current_obj -> __free_list_link = __next_obj;
// pthread TLS destructor, invoked when a thread exits: push the thread's
// state onto the global reuse chain (under the shared mutex) so a future
// thread can adopt its cached free lists instead of starting cold.
884void _Pthread_alloc_impl::_S_destructor(
void *__instance) {
885  _M_lock __lock_instance;
886  _Pthread_alloc_per_thread_state* __s = (_Pthread_alloc_per_thread_state*)__instance;
887  __s -> __next = _S_free_per_thread_states;
888  _S_free_per_thread_states = __s;
// Hand out a per-thread state: reuse one parked by an exited thread when
// available, otherwise heap-allocate a fresh one.
// NOTE(review): caller is expected to hold the chunk-allocator lock; the
// acquisition is not visible in this fragmentary view.
891_Pthread_alloc_per_thread_state* _Pthread_alloc_impl::_S_new_per_thread_state() {
893  if (0 != _S_free_per_thread_states) {
// Pop the head of the reuse chain.
894    _Pthread_alloc_per_thread_state *__result = _S_free_per_thread_states;
895    _S_free_per_thread_states = _S_free_per_thread_states -> __next;
899    return new _Pthread_alloc_per_thread_state;
// Return the calling thread's allocator state, lazily creating both the TLS
// key (once per process) and the state (once per thread).
// NOTE(review): fragmentary view — the fast-path return, the error throws
// and the final return are not visible in this chunk.
903_Pthread_alloc_per_thread_state* _Pthread_alloc_impl::_S_get_per_thread_state() {
905  __state_type* __result;
// Fast path: key exists and this thread already has a state.
907  if (_S_key_initialized && (__result = (__state_type*) pthread_getspecific(_S_key)))
// Slow path below runs under the shared mutex.
911    _M_lock __lock_instance;
// Double-checked creation of the process-wide TLS key.
912    if (!_S_key_initialized) {
913      if (pthread_key_create(&_S_key, _S_destructor)) {
916      _S_key_initialized =
true;
919    __result = _S_new_per_thread_state();
920    __ret_code = pthread_setspecific(_S_key, __result);
922    if (__ret_code ==
ENOMEM) {
// Carve __nobjs objects of __p_size bytes out of the shared window; runs
// under the chunk-allocator mutex. May shrink __nobjs, salvage a leftover
// tail into __a's free lists, grow the heap via memalign, and recurse.
// NOTE(review): fragmentary view — declarations of __result/__bytes_left,
// the out-of-memory recovery path and several closing braces are missing.
935char *_Pthread_alloc_impl::_S_chunk_alloc(
size_t __p_size,
size_t &__nobjs, _Pthread_alloc_per_thread_state *__a) {
936  typedef _Pthread_alloc_obj __obj;
939  size_t __total_bytes;
942  _M_lock __lock_instance;
944  __total_bytes = __p_size * __nobjs;
945  __bytes_left = _S_end_free - _S_start_free;
// Case 1: window satisfies the whole request.
946  if (__bytes_left >= __total_bytes) {
947    __result = _S_start_free;
948    _S_start_free += __total_bytes;
950  }
// Case 2: window holds at least one object — shrink __nobjs to fit.
else if (__bytes_left >= __p_size) {
951    __nobjs = __bytes_left/__p_size;
952    __total_bytes = __p_size * __nobjs;
953    __result = _S_start_free;
954    _S_start_free += __total_bytes;
// Case 3: window exhausted — grow it (request scales with heap size).
957    size_t __bytes_to_get = 2 * __total_bytes + _S_round_up(_S_heap_size);
// Salvage the unusable tail of the old window into __a's free lists.
959    if (__bytes_left > 0) {
960      __obj *
volatile * __my_free_list = __a->__free_list + _S_freelist_index(__bytes_left);
961      ((__obj *)_S_start_free) -> __free_list_link = *__my_free_list;
962      *__my_free_list = (__obj *)_S_start_free;
// Round the system request down to a cache-line multiple.
970      const int __cache_line_size = 128;
971      __bytes_to_get &= ~(__cache_line_size-1);
972      _S_start_free = (
char *)memalign(__cache_line_size, __bytes_to_get);
973      if (0 == _S_start_free) {
980    _S_heap_size += __bytes_to_get >> 4;
981    _S_end_free = _S_start_free + __bytes_to_get;
// Retry with the freshly grown window.
985  return _S_chunk_alloc(__p_size, __nobjs, __a);
// Allocate __n bytes from the calling thread's free lists; __n is rounded
// up in place. Falls back to _M_refill when the matching list is empty.
// NOTE(review): fragmentary view — the large-request malloc path, the
// rounding of __n and the returns are not visible in this chunk.
990void *_Pthread_alloc_impl::allocate(
size_t&
__n) {
991  typedef _Pthread_alloc_obj __obj;
992  __obj *
volatile * __my_free_list;
1001    __a = _S_get_per_thread_state();
1003  __my_free_list = __a->__free_list + _S_freelist_index(
__n);
1004  __result = *__my_free_list;
// Empty list: replenish from the shared pool.
1005  if (__result == 0) {
1006    void *__r = __a->_M_refill(
__n);
// Pop the head of the free list.
1009    *__my_free_list = __result->__free_list_link;
// Return __p (of size __n) to the calling thread's free list by pushing it
// onto the head of the matching size-class list.
// NOTE(review): fragmentary view — the large-request free() path is not
// visible in this chunk.
1014void _Pthread_alloc_impl::deallocate(
void *__p,
size_t __n) {
1015  typedef _Pthread_alloc_obj __obj;
1016  __obj *__q = (__obj *)__p;
1017  __obj *
volatile * __my_free_list;
1025    __a = _S_get_per_thread_state();
1027  __my_free_list = __a->__free_list + _S_freelist_index(
__n);
// Push onto the head of the thread-local list.
1028  __q -> __free_list_link = *__my_free_list;
1029  *__my_free_list = __q;
// Same as allocate(__n) but uses the caller-supplied state __a instead of
// looking up the calling thread's TLS entry.
// NOTE(review): fragmentary view — the large-request path, rounding of __n
// and the returns are not visible in this chunk.
1034void *_Pthread_alloc_impl::allocate(
size_t&
__n, __state_type* __a) {
1035  typedef _Pthread_alloc_obj __obj;
1036  __obj *
volatile * __my_free_list;
1048  __my_free_list = __a->__free_list + _S_freelist_index(
__n);
1049  __result = *__my_free_list;
// Empty list: replenish __a from the shared pool.
1050  if (__result == 0) {
1051    void *__r = __a->_M_refill(
__n);
// Pop the head of the free list.
1054    *__my_free_list = __result->__free_list_link;
// Same as deallocate(__p, __n) but returns the block to the caller-supplied
// state __a rather than the calling thread's own lists.
1059void *_Pthread_alloc_impl::deallocate(
void *__p,
size_t __n, __state_type* __a) {
1060  typedef _Pthread_alloc_obj __obj;
1061  __obj *__q = (__obj *)__p;
1062  __obj *
volatile * __my_free_list;
1073  __my_free_list = __a->__free_list + _S_freelist_index(
__n);
// Push onto the head of __a's matching free list.
1074  __q -> __free_list_link = *__my_free_list;
1075  *__my_free_list = __q;
// realloc-style resize: delegate to realloc for large blocks, keep the
// block when the rounded sizes match, otherwise allocate-copy-deallocate.
// NOTE(review): fragmentary view — the size test guarding the realloc call,
// the declarations of __result/__copy_sz and the final return are missing.
1078void *_Pthread_alloc_impl::reallocate(
void *__p,
size_t __old_sz,
size_t& __new_sz) {
// Large blocks came from the system allocator; presumably guarded by a
// __old_sz/__new_sz > _MAX_BYTES test just above (not visible here).
1083    return realloc(__p, __new_sz);
// Same size class: the existing block already fits.
1086  if (_S_round_up(__old_sz) == _S_round_up(__new_sz))
return __p;
1087  __result = allocate(__new_sz);
// Copy the smaller of the two sizes, then release the old block.
1088  __copy_sz = __new_sz > __old_sz? __old_sz : __new_sz;
1089  memcpy(__result, __p, __copy_sz);
1090  deallocate(__p, __old_sz);
// Out-of-class definitions of _Pthread_alloc_impl's static data: the state
// reuse chain, the TLS key, and the shared heap window (all start empty).
1094_Pthread_alloc_per_thread_state* _Pthread_alloc_impl::_S_free_per_thread_states = 0;
1095pthread_key_t _Pthread_alloc_impl::_S_key = 0;
1097bool _Pthread_alloc_impl::_S_key_initialized =
false;
1098char *_Pthread_alloc_impl::_S_start_free = 0;
1099char *_Pthread_alloc_impl::_S_end_free = 0;
1100size_t _Pthread_alloc_impl::_S_heap_size = 0;
1103{
return _Pthread_alloc_impl::allocate(
__n); }
1105{ _Pthread_alloc_impl::deallocate(__p,
__n); }
1107{
return _Pthread_alloc_impl::allocate(
__n, __a); }
1109{ _Pthread_alloc_impl::deallocate(__p,
__n, __a); }
1111{
return _Pthread_alloc_impl::reallocate(__p, __old_sz, __new_sz); }
1113{
return _Pthread_alloc_impl::_S_get_per_thread_state(); }
1121#undef _S_FREELIST_INDEX
_STLP_BEGIN_NAMESPACE typedef void(* __oom_handler_type)()
#define _STLP_VERBOSE_ASSERT(expr, diagnostic)
#define _STLP_CHECK_NULL_ALLOC(__x)
#define _STLP_THROW_BAD_ALLOC
#define _STLP_ATOMIC_DECREMENT(__x)
#define _STLP_ATOMIC_INCREMENT(__x)
#define _STLP_MUTEX_INITIALIZER
static _STLP_BEGIN_NAMESPACE __oom_handler_type __oom_handler
unsigned long __uadd_atomic_t
#define _S_FREELIST_INDEX(__bytes)
void __stlp_delete_chunck(void *__p)
char * __stlp_new_chunk(size_t __bytes)
static void *_STLP_CALL reallocate(void *__p, size_t __old_sz, size_t &__new_sz)
static __state_type *_STLP_CALL _S_get_per_thread_state()
static void _STLP_CALL deallocate(void *__p, size_t __n)
static void *_STLP_CALL allocate(size_t &__n)
static __oom_handler_type _STLP_CALL set_malloc_handler(__oom_handler_type __f)
static void _STLP_CALL deallocate(void *__p, size_t)
static void *_STLP_CALL allocate(size_t __n)
static size_t _STLP_CALL _S_round_up(size_t __bytes)
static char * _S_start_free
static _Obj * _S_refill(size_t __n)
static void _M_deallocate(void *__p, size_t __n)
static char * _S_end_free
static _Freelist _S_free_list[_STLP_NFREELISTS]
static char * _S_chunk_alloc(size_t __p_size, int &__nobjs)
static void * _M_allocate(size_t &__n)
_Obj *_STLP_VOLATILE _Freelist
static size_t _S_heap_size
static void _STLP_CALL _M_deallocate(void *__p, size_t __n)
static void *_STLP_CALL _M_allocate(size_t &__n)
static HINSTANCE instance
unsigned int(__cdecl typeof(jpeg_read_scanlines))(struct jpeg_decompress_struct *
#define __REINTERPRET_CAST(__x, __y)
#define _STLP_STATIC_ASSERT(expr)
#define _STLP_MOVE_TO_STD_NAMESPACE
#define _STLP_STATIC_MUTEX
#define __STATIC_CAST(__x, __y)
#define __CONST_CAST(__x, __y)
#define _STLP_BEGIN_NAMESPACE
#define _STLP_END_NAMESPACE
#define _STLP_MOVE_TO_PRIV_NAMESPACE
#define memcpy(s1, s2, n)
__asm__(".p2align 4, 0x90\n" ".seh_proc __seh2_global_filter_func\n" "__seh2_global_filter_func:\n" "\tsub %rbp, %rax\n" "\tpush %rbp\n" "\t.seh_pushreg %rbp\n" "\tsub $32, %rsp\n" "\t.seh_stackalloc 32\n" "\t.seh_endprologue\n" "\tsub %rax, %rdx\n" "\tmov %rdx, %rbp\n" "\tjmp *%r8\n" "__seh2_global_filter_func_exit:\n" "\t.p2align 4\n" "\tadd $32, %rsp\n" "\tpop %rbp\n" "\tret\n" "\t.seh_endproc")
_Node_alloc_obj * _M_next