ossrv_pub/thread_api/inc/stdapis/stlport/stl/_pthread_alloc.c
       
/*
 *
 * Copyright (c) 1996,1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Copyright (c) 1997
 * Moscow Center for SPARC Technology
 *
 * Copyright (c) 1999
 * Boris Fomitchev
 *
 * This material is provided "as is", with absolutely no warranty expressed
 * or implied. Any use is at your own risk.
 *
 * Permission to use or copy this software for any purpose is hereby granted
 * without fee, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 */
#ifndef _STLP_PTHREAD_ALLOC_C
#define _STLP_PTHREAD_ALLOC_C

#ifdef __WATCOMC__
#pragma warning 13 9
#pragma warning 367 9
#pragma warning 368 9
#endif

#ifndef _STLP_PTHREAD_ALLOC_H
# include <stl/_pthread_alloc.h>
#endif

# if defined (_STLP_EXPOSE_GLOBALS_IMPLEMENTATION)

# include <cerrno>

_STLP_BEGIN_NAMESPACE

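// _S_destructor is installed as the destructor for _S_key (see
// _S_get_per_thread_state below).  When a thread exits, it acquires the
// allocator lock and pushes that thread's per-thread state onto
// _S_free_per_thread_states so it can be reused by a later thread.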
       
template <size_t _Max_size>
void _Pthread_alloc<_Max_size>::_S_destructor(void * __instance)
{
    _M_lock __lock_instance;    // Need to acquire lock here.
    _Pthread_alloc_per_thread_state<_Max_size>* __s =
        (_Pthread_alloc_per_thread_state<_Max_size> *)__instance;
    __s -> __next = _S_free_per_thread_states;
    _S_free_per_thread_states = __s;
}

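// Hand out a per-thread state structure, preferring a recycled one from
// _S_free_per_thread_states; the caller must already hold the allocator lock.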
       
template <size_t _Max_size>
_Pthread_alloc_per_thread_state<_Max_size> *
_Pthread_alloc<_Max_size>::_S_new_per_thread_state()
{
    /* lock already held here. */
    if (0 != _S_free_per_thread_states) {
        _Pthread_alloc_per_thread_state<_Max_size> *__result =
            _S_free_per_thread_states;
        _S_free_per_thread_states = _S_free_per_thread_states -> __next;
        return __result;
    } else {
        return (_Pthread_alloc_per_thread_state<_Max_size>*)
                _STLP_PLACEMENT_NEW (_Pthread_alloc_per_thread_state<_Max_size>);
    }
}

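// Return the calling thread's per-thread state, creating it on first use.
// Fast path: the state has already been stored under _S_key.  Slow path:
// take the allocator lock, create _S_key once (registering _S_destructor),
// obtain a possibly recycled state and install it with pthread_setspecific.
// A failed pthread_key_create or an ENOMEM from pthread_setspecific throws
// bad_alloc; any other pthread_setspecific failure aborts.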
       
template <size_t _Max_size>
_Pthread_alloc_per_thread_state<_Max_size> *
_Pthread_alloc<_Max_size>::_S_get_per_thread_state()
{
    int __ret_code;
    __state_type* __result;

    if (_S_key_initialized && (__result = (__state_type*) pthread_getspecific(_S_key)))
      return __result;

    /*REFERENCED*/
    _M_lock __lock_instance;    // Need to acquire lock here.
    if (!_S_key_initialized) {
      if (pthread_key_create(&_S_key, _S_destructor)) {
        __THROW_BAD_ALLOC;  // failed
      }
      _S_key_initialized = true;
    }

    __result = _S_new_per_thread_state();
    __ret_code = pthread_setspecific(_S_key, __result);
    if (__ret_code) {
      if (__ret_code == ENOMEM) {
        __THROW_BAD_ALLOC;
      } else {
        // EINVAL
        _STLP_ABORT();
      }
    }
    return __result;
}

/* We allocate memory in large chunks in order to avoid fragmenting     */
/* the malloc heap too much.                                            */
/* We assume that size is properly aligned.                             */
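// When the current chunk cannot satisfy even one object, a new chunk of
// 2 * __total_bytes + _S_round_up(_S_heap_size >> 4) bytes is obtained; any
// unusable left-over bytes are first threaded onto the calling thread's
// free list for that size, and the tail call at the end retries the request
// from the freshly acquired chunk once the lock has been released.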
       
template <size_t _Max_size>
char *_Pthread_alloc<_Max_size>
::_S_chunk_alloc(size_t __p_size, size_t &__nobjs)
{
  {
    char * __result;
    size_t __total_bytes;
    size_t __bytes_left;
    /*REFERENCED*/
    _M_lock __lock_instance;         // Acquire lock for this routine

    __total_bytes = __p_size * __nobjs;
    __bytes_left = _S_end_free - _S_start_free;
    if (__bytes_left >= __total_bytes) {
        __result = _S_start_free;
        _S_start_free += __total_bytes;
        return(__result);
    } else if (__bytes_left >= __p_size) {
        __nobjs = __bytes_left/__p_size;
        __total_bytes = __p_size * __nobjs;
        __result = _S_start_free;
        _S_start_free += __total_bytes;
        return(__result);
    } else {
        size_t __bytes_to_get =
            2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
        // Try to make use of the left-over piece.
        if (__bytes_left > 0) {
            _Pthread_alloc_per_thread_state<_Max_size>* __a =
                (_Pthread_alloc_per_thread_state<_Max_size>*)
                    pthread_getspecific(_S_key);
            __obj * volatile * __my_free_list =
                        __a->__free_list + _S_freelist_index(__bytes_left);

            ((__obj *)_S_start_free) -> __free_list_link = *__my_free_list;
            *__my_free_list = (__obj *)_S_start_free;
        }
#       ifdef _SGI_SOURCE
          // Try to get memory that's aligned on something like a
          // cache line boundary, so as to avoid parceling out
          // parts of the same line to different threads and thus
          // possibly different processors.
          {
            const int __cache_line_size = 128;  // probable upper bound
            __bytes_to_get &= ~(__cache_line_size-1);
            _S_start_free = (char *)memalign(__cache_line_size, __bytes_to_get);
            if (0 == _S_start_free) {
              _S_start_free = (char *)__malloc_alloc<0>::allocate(__bytes_to_get);
            }
          }
#       else  /* !SGI_SOURCE */
          _S_start_free = (char *)__malloc_alloc<0>::allocate(__bytes_to_get);
#       endif
        _S_heap_size += __bytes_to_get;
        _S_end_free = _S_start_free + __bytes_to_get;
    }
  }
  // lock is released here
  return(_S_chunk_alloc(__p_size, __nobjs));
}


/* Returns an object of size n, and optionally adds to size n free list.*/
/* We assume that n is properly aligned.                                */
/* We hold the allocation lock.                                         */
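// _S_chunk_alloc is asked for up to 128 objects of size __n; the first one is
// returned to the caller and the remaining objects are linked into the
// corresponding per-thread free list.  If only one object could be obtained,
// the free list is left untouched.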
       
template <size_t _Max_size>
void *_Pthread_alloc_per_thread_state<_Max_size>
::_M_refill(size_t __n)
{
    size_t __nobjs = 128;
    char * __chunk =
        _Pthread_alloc<_Max_size>::_S_chunk_alloc(__n, __nobjs);
    __obj * volatile * __my_free_list;
    __obj * __result;
    __obj * __current_obj, * __next_obj;
    int __i;

    if (1 == __nobjs)  {
        return(__chunk);
    }
    __my_free_list = __free_list
             + _Pthread_alloc<_Max_size>::_S_freelist_index(__n);

    /* Build free list in chunk */
      __result = (__obj *)__chunk;
      *__my_free_list = __next_obj = (__obj *)(__chunk + __n);
      for (__i = 1; ; __i++) {
        __current_obj = __next_obj;
        __next_obj = (__obj *)((char *)__next_obj + __n);
        if (__nobjs - 1 == __i) {
            __current_obj -> __free_list_link = 0;
            break;
        } else {
            __current_obj -> __free_list_link = __next_obj;
        }
      }
    return(__result);
}

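// Resize a block previously obtained from allocate().  Requests where both the
// old and the new size exceed _Max_size are forwarded to realloc(); if both
// sizes round up to the same class the block is returned unchanged; otherwise
// a new block is allocated, min(old, new) bytes are copied and the old block
// is released.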
       
template <size_t _Max_size>
void *_Pthread_alloc<_Max_size>
::reallocate(void *__p, size_t __old_sz, size_t __new_sz)
{
    void * __result;
    size_t __copy_sz;

    if (__old_sz > _Max_size
        && __new_sz > _Max_size) {
        return(realloc(__p, __new_sz));
    }
    if (_S_round_up(__old_sz) == _S_round_up(__new_sz)) return(__p);
    __result = allocate(__new_sz);
    __copy_sz = __new_sz > __old_sz? __old_sz : __new_sz;
    memcpy(__result, __p, __copy_sz);
    deallocate(__p, __old_sz);
    return(__result);
}

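// Out-of-line definitions of the allocator's static data members.  When the
// compiler supports static template data they are defined directly; otherwise
// they are instantiated through the __DECLARE_INSTANCE mechanism below.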
       
#if defined (_STLP_STATIC_TEMPLATE_DATA) && (_STLP_STATIC_TEMPLATE_DATA > 0)

template <size_t _Max_size>
_Pthread_alloc_per_thread_state<_Max_size> * _Pthread_alloc<_Max_size>::_S_free_per_thread_states = 0;

template <size_t _Max_size>
pthread_key_t _Pthread_alloc<_Max_size>::_S_key = 0;

template <size_t _Max_size>
bool _Pthread_alloc<_Max_size>::_S_key_initialized = false;

template <size_t _Max_size>
_STLP_mutex_base _Pthread_alloc<_Max_size>::_S_chunk_allocator_lock _STLP_MUTEX_INITIALIZER;

template <size_t _Max_size>
char *_Pthread_alloc<_Max_size>::_S_start_free = 0;

template <size_t _Max_size>
char *_Pthread_alloc<_Max_size>::_S_end_free = 0;

template <size_t _Max_size>
size_t _Pthread_alloc<_Max_size>::_S_heap_size = 0;

# else

__DECLARE_INSTANCE(template <size_t _Max_size> _Pthread_alloc_per_thread_state<_Max_size> *, _Pthread_alloc<_Max_size>::_S_free_per_thread_states, = 0);
__DECLARE_INSTANCE(template <size_t _Max_size> pthread_key_t, _Pthread_alloc<_Max_size>::_S_key, = 0);
__DECLARE_INSTANCE(template <size_t _Max_size> bool, _Pthread_alloc<_Max_size>::_S_key_initialized, = false);
__DECLARE_INSTANCE(template <size_t _Max_size> char *, _Pthread_alloc<_Max_size>::_S_start_free, = 0);
__DECLARE_INSTANCE(template <size_t _Max_size> char *, _Pthread_alloc<_Max_size>::_S_end_free, = 0);
__DECLARE_INSTANCE(template <size_t _Max_size> size_t, _Pthread_alloc<_Max_size>::_S_heap_size, = 0);

# endif

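// Usage sketch (illustrative only): client code does not normally instantiate
// _Pthread_alloc directly; it is assumed to be consumed through the adaptors
// declared in <stl/_pthread_alloc.h>, e.g. a container such as
//     std::vector<int, std::pthread_allocator<int> > __v;
// where pthread_allocator routes small requests to the calling thread's
// per-thread free lists implemented above.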
       
_STLP_END_NAMESPACE

# endif /* _STLP_EXPOSE_GLOBALS_IMPLEMENTATION */

#endif /*  _STLP_PTHREAD_ALLOC_C */

// Local Variables:
// mode:C++
// End: