equal
deleted
inserted
replaced
37 |
37 |
38 #include <stl/_threads.h> |
38 #include <stl/_threads.h> |
39 |
39 |
40 #include "lock_free_slist.h" |
40 #include "lock_free_slist.h" |
41 |
41 |
|
42 |
42 #if defined(__SYMBIAN32__WSD__) |
43 #if defined(__SYMBIAN32__WSD__) |
43 #include "libstdcppwsd.h" |
44 #include "libstdcppwsd.h" |
44 |
45 |
45 #define __oom_handler get_oom_handler() |
46 #define __oom_handler get_oom_handler() |
46 #define _S_lock get_allocator_S_lock() |
47 #define _S_lock get_allocator_S_lock() |
50 #define _S_free_list get_S_free_list() |
51 #define _S_free_list get_S_free_list() |
51 #define _S_chunk_allocator_lock get_S_chunk_allocator_lock() |
52 #define _S_chunk_allocator_lock get_S_chunk_allocator_lock() |
52 #define _S_free_per_thread_states get_S_free_per_thread_states() |
53 #define _S_free_per_thread_states get_S_free_per_thread_states() |
53 #define _S_key get_S_key() |
54 #define _S_key get_S_key() |
54 #define _S_key_initialized get_S_key_initialized() |
55 #define _S_key_initialized get_S_key_initialized() |
55 |
56 #endif |
56 |
57 |
|
58 #ifdef __SYMBIAN32__ |
|
59 extern "C" |
|
60 { |
|
61 IMPORT_C void* BackendAlloc(size_t ); |
|
62 IMPORT_C void BackendFree(void* ); |
|
63 } |
|
64 |
|
65 |
|
66 EXPORT_C void* backend_allocate(size_t __n) |
|
67 { |
|
68 for (;;) |
|
69 { |
|
70 void* p = BackendAlloc(__n); |
|
71 |
|
72 if (p) |
|
73 { |
|
74 return p; |
|
75 } |
|
76 |
|
77 // set_new_handler uses Dll::Tls. So only this thread's new handler will be changed |
|
78 // until it is set back. No problems for other threads. |
|
79 std::new_handler nh_func = std::set_new_handler(NULL); |
|
80 std::set_new_handler(nh_func); |
|
81 |
|
82 if (nh_func) |
|
83 { |
|
84 nh_func(); |
|
85 } |
|
86 else |
|
87 { |
|
88 __THROW(std::bad_alloc()); |
|
89 } |
|
90 } |
|
91 } |
|
92 |
|
93 EXPORT_C void backend_free(void* __p) |
|
94 { |
|
95 BackendFree(__p); |
|
96 } |
57 #endif |
97 #endif |
58 |
98 |
59 #if defined (__WATCOMC__) |
99 #if defined (__WATCOMC__) |
60 # pragma warning 13 9 |
100 # pragma warning 13 9 |
61 # pragma warning 367 9 |
101 # pragma warning 367 9 |
82 # ifdef _STLP_NODE_ALLOC_USE_MALLOC |
122 # ifdef _STLP_NODE_ALLOC_USE_MALLOC |
83 # include <cstdlib> |
123 # include <cstdlib> |
84 inline void* __stlp_chunk_malloc(size_t __bytes) { _STLP_CHECK_NULL_ALLOC(_STLP_VENDOR_CSTD::malloc(__bytes)); } |
124 inline void* __stlp_chunk_malloc(size_t __bytes) { _STLP_CHECK_NULL_ALLOC(_STLP_VENDOR_CSTD::malloc(__bytes)); } |
85 inline void __stlp_chunck_free(void* __p) { _STLP_VENDOR_CSTD::free(__p); } |
125 inline void __stlp_chunck_free(void* __p) { _STLP_VENDOR_CSTD::free(__p); } |
86 # else |
126 # else |
87 inline void* __stlp_chunk_malloc(size_t __bytes) { return _STLP_STD::__stl_new(__bytes); } |
127 inline void* __stlp_chunk_malloc(size_t __bytes) { |
88 inline void __stlp_chunck_free(void* __p) { _STLP_STD::__stl_delete(__p); } |
128 return _STLP_STD::__stl_new(__bytes); |
|
129 } |
|
130 inline void __stlp_chunck_free(void* __p) { |
|
131 _STLP_STD::__stl_delete(__p); |
|
132 } |
|
133 |
89 # endif |
134 # endif |
90 #endif // !_DEBUG |
135 #endif // !_DEBUG |
91 |
136 |
92 #define _S_FREELIST_INDEX(__bytes) ((__bytes - size_t(1)) >> (int)_ALIGN_SHIFT) |
137 #define _S_FREELIST_INDEX(__bytes) ((__bytes - size_t(1)) >> (int)_ALIGN_SHIFT) |
93 |
138 |
171 // without permanently losing part of the object. |
216 // without permanently losing part of the object. |
172 // |
217 // |
173 |
218 |
174 #define _STLP_NFREELISTS 16 |
219 #define _STLP_NFREELISTS 16 |
175 |
220 |
176 #if defined (_STLP_LEAKS_PEDANTIC) && defined (_STLP_USE_DYNAMIC_LIB) |
221 /* |
|
222 * On Symbian, STLport is built as a DLL and also dynamically linked against |
|
223 * by the applications. The _STLP_USE_DYNAMIC_LIB should always be defined. |
|
224 * _STLP_LEAKS_PEDANTIC is defined to prevent the memory leaks in __node_alloc |
|
225 * when the library is dynamically loaded and unloaded. |
|
226 */ |
|
227 #if defined (_STLP_LEAKS_PEDANTIC) && ( defined (_STLP_USE_DYNAMIC_LIB) || defined (__SYMBIAN32__) ) |
177 /* |
228 /* |
178 * We can only do cleanup of the node allocator memory pool if we are |
229 * We can only do cleanup of the node allocator memory pool if we are |
179 * sure that the STLport library is used as a shared one as it guarantees |
230 * sure that the STLport library is used as a shared one as it guarantees |
180 * the uniqueness of the node allocator instance. Without that guarantee node |
231 * the uniqueness of the node allocator instance. Without that guarantee node |
181 * allocator instances might exchange memory blocks making the implementation |
232 * allocator instances might exchange memory blocks making the implementation |
322 // Beginning of the linked list of allocated chunks of memory |
373 // Beginning of the linked list of allocated chunks of memory |
323 static _ChunkList _S_chunks; |
374 static _ChunkList _S_chunks; |
324 #endif /* _STLP_DO_CLEAN_NODE_ALLOC */ |
375 #endif /* _STLP_DO_CLEAN_NODE_ALLOC */ |
325 |
376 |
326 public: |
377 public: |
|
378 |
327 /* __n must be > 0 */ |
379 /* __n must be > 0 */ |
328 static void* _M_allocate(size_t& __n); |
380 static void* _M_allocate(size_t& __n); |
329 /* __p may not be 0 */ |
381 /* __p may not be 0 */ |
330 static void _M_deallocate(void *__p, size_t __n); |
382 static void _M_deallocate(void *__p, size_t __n); |
331 |
383 |
334 #endif |
386 #endif |
335 }; |
387 }; |
336 |
388 |
337 #if !defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION) |
389 #if !defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION) |
338 void* __node_alloc_impl::_M_allocate(size_t& __n) { |
390 void* __node_alloc_impl::_M_allocate(size_t& __n) { |
|
391 |
339 __n = _S_round_up(__n); |
392 __n = _S_round_up(__n); |
340 _Obj * _STLP_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n); |
393 _Obj * _STLP_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n); |
341 _Obj *__r; |
394 _Obj *__r; |
342 |
395 |
343 // Acquire the lock here with a constructor call. |
396 // Acquire the lock here with a constructor call. |
356 // lock is released here |
409 // lock is released here |
357 return __r; |
410 return __r; |
358 } |
411 } |
359 |
412 |
360 void __node_alloc_impl::_M_deallocate(void *__p, size_t __n) { |
413 void __node_alloc_impl::_M_deallocate(void *__p, size_t __n) { |
|
414 |
361 _Obj * _STLP_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n); |
415 _Obj * _STLP_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n); |
362 _Obj * __pobj = __STATIC_CAST(_Obj*, __p); |
416 _Obj * __pobj = __STATIC_CAST(_Obj*, __p); |
363 |
417 |
364 // acquire lock |
418 // acquire lock |
365 _Node_Alloc_Lock __lock_instance; |
419 _Node_Alloc_Lock __lock_instance; |
496 __pcur = __pnext; |
550 __pcur = __pnext; |
497 } |
551 } |
498 _S_chunks = 0; |
552 _S_chunks = 0; |
499 _S_start_free = _S_end_free = 0; |
553 _S_start_free = _S_end_free = 0; |
500 _S_heap_size = 0; |
554 _S_heap_size = 0; |
501 memset(__REINTERPRET_CAST(char*, &_S_free_list[0]), 0, _STLP_NFREELISTS * sizeof(_Obj*)); |
555 // Reinterpret cast can't remove volatile qualification. So using C style cast |
|
556 memset((char*)(&_S_free_list[0]), 0, _STLP_NFREELISTS * sizeof(_Obj*)); |
502 } |
557 } |
503 # endif /* _STLP_DO_CLEAN_NODE_ALLOC */ |
558 # endif /* _STLP_DO_CLEAN_NODE_ALLOC */ |
504 |
559 |
505 #else /* !defined(_STLP_USE_LOCK_FREE_IMPLEMENTATION) */ |
560 #else /* !defined(_STLP_USE_LOCK_FREE_IMPLEMENTATION) */ |
506 |
561 |
712 #endif /* !defined(_STLP_USE_LOCK_FREE_IMPLEMENTATION) */ |
767 #endif /* !defined(_STLP_USE_LOCK_FREE_IMPLEMENTATION) */ |
713 |
768 |
714 #if defined (_STLP_DO_CLEAN_NODE_ALLOC) |
769 #if defined (_STLP_DO_CLEAN_NODE_ALLOC) |
715 struct __node_alloc_cleaner { |
770 struct __node_alloc_cleaner { |
716 ~__node_alloc_cleaner() |
771 ~__node_alloc_cleaner() |
717 { __node_alloc_impl::_S_dealloc_call(); } |
772 { |
|
773 __node_alloc_impl::_S_dealloc_call(); |
|
774 } |
718 }; |
775 }; |
719 |
776 |
720 # if defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION) |
777 # if defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION) |
721 _STLP_VOLATILE __stl_atomic_t& _STLP_CALL |
778 _STLP_VOLATILE __stl_atomic_t& _STLP_CALL |
722 # else |
779 # else |