/*
 *
 * Copyright (c) 1996,1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Copyright (c) 1997
 * Moscow Center for SPARC Technology
 *
 * Copyright (c) 1999
 * Boris Fomitchev
 *
 * This material is provided "as is", with absolutely no warranty expressed
 * or implied. Any use is at your own risk.
 *
 * Permission to use or copy this software for any purpose is hereby granted
 * without fee, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 */
#ifndef __STL_ALLOC_C
#define __STL_ALLOC_C

# if defined (__BUILDING_STLPORT) || ! defined (__SGI_STL_OWN_IOSTREAMS)

// Specialised debug form of malloc which does not report "false"
// memory leaks when run with debug CRT libraries.
#if defined(__STL_MSVC) && (__STL_MSVC>=1020 && defined(_DEBUG)) && ! defined (__STL_WINCE)
#  include <crtdbg.h>
#  define __STL_CHUNK_MALLOC(s) _malloc_dbg(s, _CRT_BLOCK, __FILE__, __LINE__)
#else  // !_DEBUG
# ifdef __STL_NODE_ALLOC_USE_MALLOC
#  include <cstdlib>
#  define __STL_CHUNK_MALLOC(s) __STL_VENDOR_CSTD::malloc(s)
# else
#  define __STL_CHUNK_MALLOC(s) __stl_new(s)
# endif
#endif  // !_DEBUG

#define _S_FREELIST_INDEX(__bytes) ((__bytes-size_t(1))>>(int)_ALIGN_SHIFT)

__STL_BEGIN_NAMESPACE

// Called when malloc() fails: repeatedly invoke the user-installed
// out-of-memory handler (which is expected to free some memory), then
// retry the allocation until it succeeds or no handler is installed.
template <int __inst>
void* __STL_CALL __malloc_alloc<__inst>::_S_oom_malloc(size_t __n) {
  __oom_handler_type __my_malloc_handler;
  void* __result;

  for (;;) {
    __my_malloc_handler = __oom_handler;
    if (0 == __my_malloc_handler) { __THROW_BAD_ALLOC; }
    (*__my_malloc_handler)();
    __result = malloc(__n);
    if (__result) return (__result);
  }
#if defined(__STL_NEED_UNREACHABLE_RETURN)
  return 0;
#endif
}

// Debug allocator: surrounds each allocation with a header and shredded
// pad bytes so that double deletes, size mismatches, underruns and
// overruns can be detected on deallocation.
template <class _Alloc>
void* __STL_CALL __debug_alloc<_Alloc>::allocate(size_t __n) {
  size_t __real_n = __n + __extra_before_chunk() + __extra_after_chunk();
  __alloc_header* __result = (__alloc_header*)__allocator_type::allocate(__real_n);
  memset((char*)__result, __shred_byte, __real_n*sizeof(value_type));
  __result->__magic = __magic;
  __result->__type_size = sizeof(value_type);
  __result->_M_size = __n;
  return ((char*)__result) + (long)__extra_before;
}

template <class _Alloc>
void __STL_CALL __debug_alloc<_Alloc>::deallocate(void* __p, size_t __n) {
  __alloc_header* __real_p = (__alloc_header*)((char*)__p - (long)__extra_before);
  // check integrity
  __STL_VERBOSE_ASSERT(__real_p->__magic != __deleted_magic, _StlMsg_DBA_DELETED_TWICE)
  __STL_VERBOSE_ASSERT(__real_p->__magic == __magic, _StlMsg_DBA_NEVER_ALLOCATED)
  __STL_VERBOSE_ASSERT(__real_p->__type_size == 1, _StlMsg_DBA_TYPE_MISMATCH)
  __STL_VERBOSE_ASSERT(__real_p->_M_size == __n, _StlMsg_DBA_SIZE_MISMATCH)
  // check pads on both sides
  unsigned char* __tmp;
  for (__tmp = (unsigned char*)(__real_p+1); __tmp < (unsigned char*)__p; __tmp++) {
    __STL_VERBOSE_ASSERT(*__tmp == __shred_byte, _StlMsg_DBA_UNDERRUN)
  }

  size_t __real_n = __n + __extra_before_chunk() + __extra_after_chunk();

  for (__tmp = ((unsigned char*)__p) + __n*sizeof(value_type);
       __tmp < ((unsigned char*)__real_p) + __real_n; __tmp++) {
    __STL_VERBOSE_ASSERT(*__tmp == __shred_byte, _StlMsg_DBA_OVERRUN)
  }

  // Mark the header as deleted and shred the user area, so that a
  // stale reference or a second delete is caught.
  __real_p->__magic = __deleted_magic;
  memset((char*)__p, __shred_byte, __n*sizeof(value_type));
  __allocator_type::deallocate(__real_p, __real_n);
}
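
// A sketch of the block layout the checks above rely on (the exact pad
// widths come from __extra_before and __extra_after, defined with
// __debug_alloc; either pad may be empty):
//
//   __real_p                  __p = __real_p + __extra_before
//   |                         |
//   v                         v
//   +----------------+------+--------------------------+------+
//   | __alloc_header | pad  | __n * sizeof(value_type) | pad  |
//   +----------------+------+--------------------------+------+
//   |<----------------------- __real_n ----------------------->|
//
// allocate() fills the whole block with __shred_byte before writing the
// header, and deallocate() verifies that both pads still hold that byte.
//
// With the usual _ALIGN == 8 (_ALIGN_SHIFT == 3) configuration, the
// _S_FREELIST_INDEX macro used below maps small requests onto the node
// allocator's free lists as 1..8 -> 0, 9..16 -> 1, ..., 121..128 -> 15.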

# ifdef __STL_THREADS

// RAII lock for the shared free lists: the constructor acquires the
// static mutex and the destructor releases it, so the lock is also
// released during stack unwinding.
template <bool __threads, int __inst>
class __STL_CLASS_DECLSPEC _Node_Alloc_Lock {
public:
  _Node_Alloc_Lock() {
#  ifdef __STL_SGI_THREADS
    if (__threads && __us_rsthread_malloc)
#  else /* !__STL_SGI_THREADS */
    if (__threads)
#  endif
      _S_lock._M_acquire_lock();
  }

  ~_Node_Alloc_Lock() {
#  ifdef __STL_SGI_THREADS
    if (__threads && __us_rsthread_malloc)
#  else /* !__STL_SGI_THREADS */
    if (__threads)
#  endif
      _S_lock._M_release_lock();
  }

  static _STL_STATIC_MUTEX _S_lock;
};

# endif /* __STL_THREADS */

template <bool __threads, int __inst>
void* __STL_CALL __node_alloc<__threads, __inst>::allocate(size_t __n) {
  void* __r;
  if (__n > (size_t)_MAX_BYTES) {
# ifdef __STL_NODE_ALLOC_USE_MALLOC
    __r = __STL_VENDOR_CSTD::malloc(__n);
# else
    __r = __stl_new(__n);
# endif
  } else {
    _Obj* __STL_VOLATILE* __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);
    // Acquire the lock here with a constructor call.
    // This ensures that it is released on exit or during stack
    // unwinding.
# ifdef __STL_THREADS
    /*REFERENCED*/
    _Node_Alloc_Lock<__threads, __inst> __lock_instance;
# endif
    if ((__r = *__my_free_list) != 0) {
      *__my_free_list = ((_Obj*)__r)->_M_free_list_link;
    } else {
      __r = _S_refill(__n);
    }
    // lock is released here
  }
  return __r;
}

template <bool __threads, int __inst>
void __STL_CALL __node_alloc<__threads, __inst>::deallocate(void* __p, size_t __n) {
  if (__n > (size_t)_MAX_BYTES) {
# ifdef __STL_NODE_ALLOC_USE_MALLOC
    __STL_VENDOR_CSTD::free(__p);
# else
    __stl_delete(__p);
# endif
  } else {
    _Obj* __STL_VOLATILE* __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);
# ifdef __STL_THREADS
    /*REFERENCED*/
    _Node_Alloc_Lock<__threads, __inst> __lock_instance;
# endif /* __STL_THREADS */
    // acquire lock, then push the block onto its free list
    ((_Obj*)__p)->_M_free_list_link = *__my_free_list;
    *__my_free_list = (_Obj*)__p;
    // lock is released here
  }
}

/* We allocate memory in large chunks in order to avoid fragmenting */
/* the malloc heap too much.                                        */
/* We assume that size is properly aligned.                         */
/* We hold the allocation lock.                                     */
template <bool __threads, int __inst>
char* __STL_CALL
__node_alloc<__threads, __inst>::_S_chunk_alloc(size_t _p_size, int& __nobjs) {
  char* __result;
  size_t __total_bytes = _p_size * __nobjs;
  size_t __bytes_left = _S_end_free - _S_start_free;

  if (__bytes_left >= __total_bytes) {
    __result = _S_start_free;
    _S_start_free += __total_bytes;
    return (__result);
  } else if (__bytes_left >= _p_size) {
    // Not enough room for all __nobjs objects: hand out as many as fit.
    __nobjs = (int)(__bytes_left / _p_size);
    __total_bytes = _p_size * __nobjs;
    __result = _S_start_free;
    _S_start_free += __total_bytes;
    return (__result);
  } else {
    size_t __bytes_to_get = 2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
    // Try to make use of the left-over piece.
    if (__bytes_left > 0) {
      _Obj* __STL_VOLATILE* __my_free_list =
        _S_free_list + _S_FREELIST_INDEX(__bytes_left);
      ((_Obj*)_S_start_free)->_M_free_list_link = *__my_free_list;
      *__my_free_list = (_Obj*)_S_start_free;
    }
    _S_start_free = (char*)__STL_CHUNK_MALLOC(__bytes_to_get);
    if (0 == _S_start_free) {
      size_t __i;
      _Obj* __STL_VOLATILE* __my_free_list;
      _Obj* __p;
      // Try to make do with what we have. That can't hurt.
      // We do not try smaller requests, since that tends
      // to result in disaster on multi-process machines.
      for (__i = _p_size; __i <= (size_t)_MAX_BYTES; __i += (size_t)_ALIGN) {
        __my_free_list = _S_free_list + _S_FREELIST_INDEX(__i);
        __p = *__my_free_list;
        if (0 != __p) {
          *__my_free_list = __p->_M_free_list_link;
          _S_start_free = (char*)__p;
          _S_end_free = _S_start_free + __i;
          return (_S_chunk_alloc(_p_size, __nobjs));
          // Any leftover piece will eventually make it to the
          // right free list.
        }
      }
      _S_end_free = 0; // In case of exception.
      _S_start_free = (char*)__STL_CHUNK_MALLOC(__bytes_to_get);
      // This should either throw an exception or remedy the
      // situation. Thus we assume it succeeded.
    }
    _S_heap_size += __bytes_to_get;
    _S_end_free = _S_start_free + __bytes_to_get;
    return (_S_chunk_alloc(_p_size, __nobjs));
  }
}
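
// Worked example of the growth policy above, assuming a freshly
// initialized allocator (_S_heap_size == 0) and a first refill of the
// 64-byte list: _S_refill (below) asks for __nobjs == 20 objects, so
// __total_bytes == 20 * 64 == 1280 and
// __bytes_to_get == 2 * 1280 + _S_round_up(0 >> 4) == 2560.
// The recursive call then carves the 20 objects (1280 bytes) out of the
// new chunk and leaves the other 1280 bytes in the pool, so the next
// refill of any list can be served, at least in part, without touching
// malloc. As _S_heap_size grows, the _S_round_up(_S_heap_size >> 4)
// term makes successive chunks progressively larger.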

/* Returns an object of size __n, and optionally adds to the size __n free list. */
/* We assume that __n is properly aligned.                                       */
/* We hold the allocation lock.                                                  */
template <bool __threads, int __inst>
void* __STL_CALL __node_alloc<__threads, __inst>::_S_refill(size_t __n) {
  int __nobjs = 20;
  __n = _S_round_up(__n);
  char* __chunk = _S_chunk_alloc(__n, __nobjs);
  _Obj* __STL_VOLATILE* __my_free_list;
  _Obj* __result;
  _Obj* __current_obj;
  _Obj* __next_obj;
  int __i;

  if (1 == __nobjs) return (__chunk);
  __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);

  /* Build free list in chunk */
  __result = (_Obj*)__chunk;
  *__my_free_list = __next_obj = (_Obj*)(__chunk + __n);
  for (__i = 1; ; __i++) {
    __current_obj = __next_obj;
    __next_obj = (_Obj*)((char*)__next_obj + __n);
    if (__nobjs - 1 == __i) {
      __current_obj->_M_free_list_link = 0;
      break;
    } else {
      __current_obj->_M_free_list_link = __next_obj;
    }
  }
  return (__result);
}

# if ( __STL_STATIC_TEMPLATE_DATA > 0 )

// malloc_alloc out-of-memory handling
template <int __inst>
__oom_handler_type __malloc_alloc<__inst>::__oom_handler = (__oom_handler_type)0;

#ifdef __STL_THREADS
template <bool __threads, int __inst>
_STL_STATIC_MUTEX
_Node_Alloc_Lock<__threads, __inst>::_S_lock __STL_MUTEX_INITIALIZER;
#endif

template <bool __threads, int __inst>
_Node_alloc_obj * __STL_VOLATILE
__node_alloc<__threads, __inst>::_S_free_list[_NFREELISTS]
= {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
// The 16 zeros are necessary to make version 4.1 of the SunPro
// compiler happy. Otherwise it appears to allocate too little
// space for the array.

template <bool __threads, int __inst>
char* __node_alloc<__threads, __inst>::_S_start_free = 0;

template <bool __threads, int __inst>
char* __node_alloc<__threads, __inst>::_S_end_free = 0;

template <bool __threads, int __inst>
size_t __node_alloc<__threads, __inst>::_S_heap_size = 0;

# else /* ( __STL_STATIC_TEMPLATE_DATA > 0 ) */

__DECLARE_INSTANCE(__oom_handler_type, __malloc_alloc<0>::__oom_handler, =0);

# define __STL_ALLOC_NOTHREADS      __node_alloc<false, 0>
# define __STL_ALLOC_THREADS        __node_alloc<true, 0>
# define __STL_ALLOC_NOTHREADS_LOCK _Node_Alloc_Lock<false, 0>
# define __STL_ALLOC_THREADS_LOCK   _Node_Alloc_Lock<true, 0>

__DECLARE_INSTANCE(char*, __STL_ALLOC_NOTHREADS::_S_start_free, =0);
__DECLARE_INSTANCE(char*, __STL_ALLOC_NOTHREADS::_S_end_free, =0);
__DECLARE_INSTANCE(size_t, __STL_ALLOC_NOTHREADS::_S_heap_size, =0);
__DECLARE_INSTANCE(_Node_alloc_obj* __STL_VOLATILE,
                   __STL_ALLOC_NOTHREADS::_S_free_list[_NFREELISTS], ={0});
__DECLARE_INSTANCE(char*, __STL_ALLOC_THREADS::_S_start_free, =0);
__DECLARE_INSTANCE(char*, __STL_ALLOC_THREADS::_S_end_free, =0);
__DECLARE_INSTANCE(size_t, __STL_ALLOC_THREADS::_S_heap_size, =0);
__DECLARE_INSTANCE(_Node_alloc_obj* __STL_VOLATILE,
                   __STL_ALLOC_THREADS::_S_free_list[_NFREELISTS], ={0});

# ifdef __STL_THREADS
__DECLARE_INSTANCE(_STL_STATIC_MUTEX,
                   __STL_ALLOC_NOTHREADS_LOCK::_S_lock,
                   __STL_MUTEX_INITIALIZER);
__DECLARE_INSTANCE(_STL_STATIC_MUTEX,
                   __STL_ALLOC_THREADS_LOCK::_S_lock,
                   __STL_MUTEX_INITIALIZER);
# endif

# undef __STL_ALLOC_THREADS
# undef __STL_ALLOC_NOTHREADS

# endif /* __STL_STATIC_TEMPLATE_DATA */

__STL_END_NAMESPACE

# undef _S_FREELIST_INDEX

# endif /* OWN_IOSTREAMS */

#endif /* __STL_ALLOC_C */

// Local Variables:
// mode:C++
// End: