genericopenlibs/cppstdlib/stl/src/allocators.cpp
branch RCL_3
changeset 54 4332f0f7be53
parent 0 e4d67989cc36
child 56 acd3cd4aaceb
--- a/genericopenlibs/cppstdlib/stl/src/allocators.cpp	Thu Jul 15 20:34:46 2010 +0300
+++ b/genericopenlibs/cppstdlib/stl/src/allocators.cpp	Thu Aug 19 11:33:45 2010 +0300
@@ -39,6 +39,7 @@
 
 #include "lock_free_slist.h"
 
+
 #if defined(__SYMBIAN32__WSD__)
 #include "libstdcppwsd.h"
 
@@ -52,8 +53,47 @@
 #define _S_free_per_thread_states	get_S_free_per_thread_states()
 #define _S_key						get_S_key()
 #define _S_key_initialized			get_S_key_initialized()
+#endif
+
+#ifdef __SYMBIAN32__
+extern "C"
+{
+IMPORT_C void* BackendAlloc(size_t );
+IMPORT_C void BackendFree(void* );
+}
 
 
+EXPORT_C void* backend_allocate(size_t __n)
+    {
+    for (;;)
+        {
+        void* p = BackendAlloc(__n);
+
+        if (p)
+            {
+            return p; 
+            }
+        
+        // set_new_handler uses Dll::Tls, so only this thread's new handler is changed,
+        // and only until it is restored just below. Other threads are not affected.
+        std::new_handler nh_func  = std::set_new_handler(NULL);
+        std::set_new_handler(nh_func);
+
+        if (nh_func)
+            {
+            nh_func();
+            }
+        else
+            {
+            __THROW(std::bad_alloc());
+            }
+        }
+    }
+
+EXPORT_C void  backend_free(void* __p)
+    {
+    BackendFree(__p);
+    }
 #endif
 
 #if defined (__WATCOMC__)
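[Note: illustrative sketch, not part of the changeset. The loop in backend_allocate() above follows the standard operator-new retry protocol; the hypothetical helper below shows the same pattern with std::malloc standing in for BackendAlloc(): query the installed new_handler by swapping in NULL and immediately restoring it, call it so it can release memory, and throw std::bad_alloc only when no handler is set.]

#include <new>
#include <cstdlib>

void* allocate_with_handler_retry(std::size_t __n)    // hypothetical name
    {
    for (;;)
        {
        void* p = std::malloc(__n);                   // stand-in for BackendAlloc()
        if (p)
            {
            return p;
            }
        // Read the current handler without permanently changing it.
        std::new_handler nh = std::set_new_handler(NULL);
        std::set_new_handler(nh);
        if (nh)
            {
            nh();                                     // handler may free memory; retry
            }
        else
            {
            throw std::bad_alloc();
            }
        }
    }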
@@ -84,8 +124,13 @@
 inline void* __stlp_chunk_malloc(size_t __bytes) { _STLP_CHECK_NULL_ALLOC(_STLP_VENDOR_CSTD::malloc(__bytes)); }
 inline void __stlp_chunck_free(void* __p) { _STLP_VENDOR_CSTD::free(__p); }
 #  else
-inline void* __stlp_chunk_malloc(size_t __bytes) { return _STLP_STD::__stl_new(__bytes); }
-inline void __stlp_chunck_free(void* __p) { _STLP_STD::__stl_delete(__p); }
+inline void* __stlp_chunk_malloc(size_t __bytes) {
+    return _STLP_STD::__stl_new(__bytes);
+}
+inline void __stlp_chunck_free(void* __p) {
+    _STLP_STD::__stl_delete(__p);     
+}
+ 
 #  endif
 #endif  // !_DEBUG
 
@@ -173,7 +218,13 @@
 
 #define _STLP_NFREELISTS 16
 
-#if defined (_STLP_LEAKS_PEDANTIC) && defined (_STLP_USE_DYNAMIC_LIB)
+/*
+ * On Symbian, STLport is built as a DLL and applications link against it
+ * dynamically, so _STLP_USE_DYNAMIC_LIB should always be defined.
+ * _STLP_LEAKS_PEDANTIC is defined to prevent memory leaks in __node_alloc
+ * when the library is dynamically loaded and unloaded.
+ */
+#if defined (_STLP_LEAKS_PEDANTIC) && ( defined (_STLP_USE_DYNAMIC_LIB) || defined (__SYMBIAN32__) )
 /*
  * We can only do cleanup of the node allocator memory pool if we are
  * sure that the STLport library is used as a shared one as it guaranties
@@ -324,6 +375,7 @@
 #endif /* _STLP_DO_CLEAN_NODE_ALLOC */
 
 public:
+
   /* __n must be > 0      */
   static void* _M_allocate(size_t& __n);
   /* __p may not be 0 */
@@ -336,6 +388,7 @@
 
 #if !defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION)
 void* __node_alloc_impl::_M_allocate(size_t& __n) {
+
   __n = _S_round_up(__n);
   _Obj * _STLP_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);
   _Obj *__r;
@@ -358,6 +411,7 @@
 }
 
 void __node_alloc_impl::_M_deallocate(void *__p, size_t __n) {
+   
   _Obj * _STLP_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);
   _Obj * __pobj = __STATIC_CAST(_Obj*, __p);
 
@@ -498,7 +552,8 @@
   _S_chunks = 0;
   _S_start_free = _S_end_free = 0;
   _S_heap_size = 0;
-  memset(__REINTERPRET_CAST(char*, &_S_free_list[0]), 0, _STLP_NFREELISTS * sizeof(_Obj*));
+  // reinterpret_cast cannot cast away volatile, so a C-style cast is used here.
+  memset((char*)(&_S_free_list[0]), 0, _STLP_NFREELISTS * sizeof(_Obj*));
 }
 #  endif /* _STLP_DO_CLEAN_NODE_ALLOC */
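[Note: illustrative sketch, not part of the changeset. reinterpret_cast cannot cast away volatile, which is why the hunk above switches to a C-style cast; with a simplified free-list type, the equivalent explicit cast looks like this.]

#include <cstring>

struct _Node { _Node* _M_next; };                     // simplified stand-in for _Obj
static _Node* volatile _S_list[16];                   // volatile, like _S_free_list

void reset_list()
    {
    // reinterpret_cast<char*>(&_S_list[0]) is ill-formed: it would drop volatile.
    // A C-style cast is equivalent to const_cast followed by reinterpret_cast:
    std::memset(const_cast<_Node**>(&_S_list[0]), 0, 16 * sizeof(_Node*));
    }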
 
@@ -714,7 +769,9 @@
 #if defined (_STLP_DO_CLEAN_NODE_ALLOC)
 struct __node_alloc_cleaner {
   ~__node_alloc_cleaner()
-  { __node_alloc_impl::_S_dealloc_call(); }
+      {
+      __node_alloc_impl::_S_dealloc_call(); 
+      }
 };
 
 #  if defined (_STLP_USE_LOCK_FREE_IMPLEMENTATION)
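[Note: illustrative sketch, not part of the changeset. __node_alloc_cleaner above uses the static-cleaner idiom: the destructor of a file-scope static object runs when the library is unloaded (or the program exits), giving the node pool a hook to release its chunks. A minimal sketch with hypothetical names:]

#include <cstdio>

struct pool_cleaner
    {
    ~pool_cleaner()
        {
        std::puts("releasing pooled chunks");         // stand-in for _S_dealloc_call()
        }
    };

static pool_cleaner _S_cleaner;                       // destroyed at unload/exit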