utilities/standaloneallocator/newallocator_p.h
changeset 16 3c88a81ff781
/*
 * Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
 *
 * This file is part of Qt Web Runtime.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * version 2.1 as published by the Free Software Foundation.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 */


/****************************************************************************
 *
 * This file is part of the Symbian application wrapper of the Qt Toolkit.
 *
 * The memory allocator is backported from Symbian OS, and can eventually
 * be removed from Qt once it is built in to all supported OS versions.
 * The allocator is a composite of three allocators:
 *  - A page allocator, for large allocations
 *  - A slab allocator, for small allocations
 *  - Doug Lea's allocator, for medium size allocations
 *
 ***************************************************************************/
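/*
 * Illustrative sketch (not code from this file): how a request is expected to
 * be routed between the three sub-allocators, based on the slab_threshold and
 * page_threshold members declared below. The actual dispatch lives in the
 * implementation file; the index expression and ordering here are assumptions
 * for illustration only.
 *
 *   TAny* RNewAllocator::Alloc(TInt aSize)
 *       {
 *       if (aSize <= slab_threshold)            // small request  -> slab allocator
 *           return slab_allocate(slaballoc[sizemap[(aSize + 3) >> 2]]);
 *       if (aSize >= (1 << page_threshold))     // large request  -> page allocator
 *           return paged_allocate(aSize);
 *       return dlmalloc(aSize);                 // medium request -> Doug Lea allocator
 *       }
 */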
       

#ifndef NEWALLOCATOR_H
#define NEWALLOCATOR_H

class RNewAllocator : public RAllocator
    {
public:
    enum {EAllocCellSize = 8};

    virtual TAny* Alloc(TInt aSize);
    virtual void Free(TAny* aPtr);
    virtual TAny* ReAlloc(TAny* aPtr, TInt aSize, TInt aMode=0);
    virtual TInt AllocLen(const TAny* aCell) const;
    virtual TInt Compress();
    virtual void Reset();
    virtual TInt AllocSize(TInt& aTotalAllocSize) const;
    virtual TInt Available(TInt& aBiggestBlock) const;
    virtual TInt DebugFunction(TInt aFunc, TAny* a1=NULL, TAny* a2=NULL);
protected:
    virtual TInt Extension_(TUint aExtensionId, TAny*& a0, TAny* a1);

public:
    TInt Size() const
    { return iChunkSize; }

    inline TInt MaxLength() const;
    inline TUint8* Base() const;
    inline TInt Align(TInt a) const;
    inline const TAny* Align(const TAny* a) const;
    inline void Lock() const;
    inline void Unlock() const;
    inline TInt ChunkHandle() const;

    /**
    @internalComponent
    */
    struct _s_align {char c; double d;};

    /**
    The structure of a heap cell header for a heap cell on the free list.
    */
    struct SCell {
                 /**
                 The length of the cell, which includes the length of
                 this header.
                 */
                 TInt len;


                 /**
                 A pointer to the next cell in the free list.
                 */
                 SCell* next;
                 };

    /**
    The default cell alignment.
    */
    enum {ECellAlignment = sizeof(_s_align)-sizeof(double)};
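    // sizeof(_s_align) - sizeof(double) equals the offset of the double member d,
    // i.e. the alignment requirement of double, so cells are aligned for the most
    // strictly aligned built-in type.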
       
    /**
    Size of a free cell header.
    */
    enum {EFreeCellSize = sizeof(SCell)};

    /**
    @internalComponent
    */
    enum TDefaultShrinkRatios {EShrinkRatio1=256, EShrinkRatioDflt=512};

public:
    RNewAllocator(TInt aMaxLength, TInt aAlign=0, TBool aSingleThread=ETrue);
    RNewAllocator(TInt aChunkHandle, TInt aOffset, TInt aMinLength, TInt aMaxLength, TInt aGrowBy, TInt aAlign=0, TBool aSingleThread=EFalse);
    inline RNewAllocator();

    TAny* operator new(TUint aSize, TAny* aBase) __NO_THROW;
    inline void operator delete(TAny*, TAny*);
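    // Usage sketch (an assumption for illustration, not code from this file): the
    // placement operator new above lets the allocator be constructed in place at
    // the base of an already committed chunk, e.g.
    //     RNewAllocator* heap = new (chunkBase + offset)
    //         RNewAllocator(chunk.Handle(), offset, minLength, maxLength, growBy);
    // 'chunkBase', 'offset' and the size parameters are hypothetical names here.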
       
protected:
    SCell* GetAddress(const TAny* aCell) const;

public:
    TInt iMinLength;
    TInt iMaxLength;                // maximum bytes used by the allocator in total
    TInt iOffset;                   // offset of RNewAllocator object from chunk base
    TInt iGrowBy;

    TInt iChunkHandle;              // handle of chunk
    RFastLock iLock;
    TUint8* iBase;                  // bottom of DL memory, i.e. this+sizeof(RNewAllocator)
    TUint8* iTop;                   // top of DL memory (page aligned)
    TInt iAlign;
    TInt iMinCell;
    TInt iPageSize;
    SCell iFree;
protected:
    TInt iNestingLevel;
    TInt iAllocCount;
    TAllocFail iFailType;
    TInt iFailRate;
    TBool iFailed;
    TInt iFailAllocCount;
    TInt iRand;
    TAny* iTestData;
protected:
    TInt iChunkSize;                // currently allocated bytes in the chunk (== chunk.Size())
    malloc_state iGlobalMallocState;
    malloc_params mparams;
    TInt iHighWaterMark;
private:
    void Init(TInt aBitmapSlab, TInt aPagePower, size_t aTrimThreshold);    /* Init internal data structures */
    inline int init_mparams(size_t aTrimThreshold /*= DEFAULT_TRIM_THRESHOLD*/);
    void init_bins(mstate m);
    void init_top(mstate m, mchunkptr p, size_t psize);
    void* sys_alloc(mstate m, size_t nb);
    msegmentptr segment_holding(mstate m, TUint8* addr);
    void add_segment(mstate m, TUint8* tbase, size_t tsize, flag_t mmapped);
    int sys_trim(mstate m, size_t pad);
    int has_segment_link(mstate m, msegmentptr ss);
    size_t release_unused_segments(mstate m);
    void* mmap_alloc(mstate m, size_t nb);    /* Need to check this function */
    void* prepend_alloc(mstate m, TUint8* newbase, TUint8* oldbase, size_t nb);
    void* tmalloc_large(mstate m, size_t nb);
    void* tmalloc_small(mstate m, size_t nb);
    /* MACROS converted functions */
    static inline void unlink_first_small_chunk(mstate M,mchunkptr B,mchunkptr P,bindex_t& I);
    static inline void insert_small_chunk(mstate M,mchunkptr P, size_t S);
    static inline void insert_chunk(mstate M,mchunkptr P,size_t S,size_t NPAGES);
    static inline void unlink_large_chunk(mstate M,tchunkptr X);
    static inline void unlink_small_chunk(mstate M, mchunkptr P,size_t S);
    static inline void unlink_chunk(mstate M, mchunkptr P, size_t S);
    static inline void compute_tree_index(size_t S, bindex_t& I);
    static inline void insert_large_chunk(mstate M,tchunkptr X,size_t S,size_t NPAGES);
    static inline void replace_dv(mstate M, mchunkptr P, size_t S);
    static inline void compute_bit2idx(binmap_t X,bindex_t& I);
    /* MACROS converted functions */
    TAny* SetBrk(TInt32 aDelta);
    void* internal_realloc(mstate m, void* oldmem, size_t bytes);
    void  internal_malloc_stats(mstate m);
    int change_mparam(int param_number, int value);
#if !NO_MALLINFO
    mallinfo internal_mallinfo(mstate m);
#endif
    void Init_Dlmalloc(size_t capacity, int locked, size_t aTrimThreshold);
    void* dlmalloc(size_t);
    void  dlfree(void*);
    void* dlrealloc(void*, size_t);
    int dlmallopt(int, int);
    size_t dlmalloc_footprint(void);
    size_t dlmalloc_max_footprint(void);
#if !NO_MALLINFO
    struct mallinfo dlmallinfo(void);
#endif
    int  dlmalloc_trim(size_t);
    size_t dlmalloc_usable_size(void*);
    void  dlmalloc_stats(void);
    inline  mchunkptr mmap_resize(mstate m, mchunkptr oldp, size_t nb);

    /**************************** Code Added For DL heap **********************/
    friend TInt _symbian_SetupThreadHeap(TBool aNotFirst, SStdEpocThreadCreateInfo& aInfo);
private:
    unsigned short slab_threshold;
    unsigned short page_threshold;      // 2^n is smallest cell size allocated in paged allocator
    unsigned slab_init_threshold;
    unsigned slab_config_bits;
    slab* partial_page;                 // partial-use page tree
    page* spare_page;                   // single empty page cached
    unsigned char sizemap[(maxslabsize>>2)+1];  // index of slabset based on size class
private:
    static void tree_remove(slab* s);
    static void tree_insert(slab* s,slab** r);
public:
    enum {okbits = (1<<(maxslabsize>>2))-1};
    void slab_init(unsigned slabbitmap);
    void slab_config(unsigned slabbitmap);
    void* slab_allocate(slabset& allocator);
    void slab_free(void* p);
    void* allocnewslab(slabset& allocator);
    void* allocnewpage(slabset& allocator);
    void* initnewslab(slabset& allocator, slab* s);
    void freeslab(slab* s);
    void freepage(page* p);
    void* map(void* p,unsigned sz);
    void* remap(void* p,unsigned oldsz,unsigned sz);
    void unmap(void* p,unsigned sz);
    /** I think we need to move these functions to the slab allocator class ***/
    static inline unsigned header_free(unsigned h)
    {return (h&0x000000ff);}
    static inline unsigned header_pagemap(unsigned h)
    {return (h&0x00000f00)>>8;}
    static inline unsigned header_size(unsigned h)
    {return (h&0x0003f000)>>12;}
    static inline unsigned header_usedm4(unsigned h)
    {return (h&0x0ffc0000)>>18;}
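    // Bit layout of the 32-bit slab header word, as decoded by the accessors above
    // (derived from the masks; the exact field semantics are not spelt out in this
    // header):
    //   bits  0-7   free
    //   bits  8-11  pagemap
    //   bits 12-17  size
    //   bits 18-27  usedm4   (i.e. "used minus 4", going by the name)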
       
    /*** paged allocator code ***/
    void paged_init(unsigned pagepower);
    void* paged_allocate(unsigned size);
    void paged_free(void* p);
    void* paged_reallocate(void* p, unsigned size);
    pagecell* paged_descriptor(const void* p) const;

    /* DL heap log dump functions */
#ifdef OOM_LOGGING
    void dump_heap_logs(size_t fail_size);
    void dump_dl_free_chunks();
    void dump_large_chunk(mstate m, tchunkptr t);
    size_t iUnmappedChunkSize;
#endif
private:
    /* Debug checks for chunk page support */
#ifdef DL_CHUNK_MEM_DEBUG
#define do_chunk_page_release_check(p, psize, fm, mem_released) debug_chunk_page_release_check(p, psize, fm, mem_released)
#define do_check_large_chunk_access(p, psize) debug_check_large_chunk_access(p, psize)
#define do_check_small_chunk_access(p, psize) debug_check_small_chunk_access(p, psize)
#define do_check_any_chunk_access(p, psize) debug_check_any_chunk_access(p, psize)
    void debug_check_large_chunk_access(tchunkptr p, size_t psize);
    void debug_check_small_chunk_access(mchunkptr p, size_t psize);
    void debug_check_any_chunk_access(mchunkptr p, size_t psize);
    void debug_chunk_page_release_check(mchunkptr p, size_t psize, mstate fm, int mem_released);
#else
#define do_chunk_page_release_check(p, psize, fm, mem_released)
#define do_check_large_chunk_access(p, psize)
#define do_check_small_chunk_access(p, psize)
#define do_check_any_chunk_access(p, psize)
#endif

    /* Chunk page release mechanism support */
    TInt map_chunk_pages(tchunkptr p, size_t psize);
    TInt unmap_chunk_pages(tchunkptr p, size_t psize, size_t prev_npages);
    TInt map_chunk_pages_partial(tchunkptr tp, size_t psize, tchunkptr r, size_t rsize);
    TInt sys_trim_partial(mstate m, mchunkptr prev, size_t psize, size_t prev_npages);
    size_t free_chunk_threshold;

    // paged allocator structures
    enum {npagecells=4};
    pagecell pagelist[npagecells];      // descriptors for page-aligned large allocations
    inline void TraceReAlloc(TAny* aPtr, TInt aSize, TAny* aNewPtr, TInt aZone);
    inline void TraceCallStack();
    // to track maximum used
    //TInt iHighWaterMark;


private:
    static RNewAllocator* FixedHeap(TAny* aBase, TInt aMaxLength, TInt aAlign, TBool aSingleThread);
    static RNewAllocator* ChunkHeap(const TDesC* aName, TInt aMinLength, TInt aMaxLength, TInt aGrowBy, TInt aAlign, TBool aSingleThread);
    static RNewAllocator* ChunkHeap(RChunk aChunk, TInt aMinLength, TInt aGrowBy, TInt aMaxLength, TInt aAlign, TBool aSingleThread, TUint32 aMode);
    static RNewAllocator* OffsetChunkHeap(RChunk aChunk, TInt aMinLength, TInt aOffset, TInt aGrowBy, TInt aMaxLength, TInt aAlign, TBool aSingleThread, TUint32 aMode);
    static TInt CreateThreadHeap(SStdEpocThreadCreateInfo& aInfo, RNewAllocator*& aHeap, TInt aAlign = 0, TBool aSingleThread = EFalse);
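    // Note: these private factory functions parallel the usual Symbian heap
    // creation functions of the same names; judging from the friend declaration
    // above, they are expected to be reached via _symbian_SetupThreadHeap when a
    // thread heap is created (the actual wiring lives in the implementation file).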
       

private:
    /**
     * Always keep this variable at the bottom of the class, as it is used as
     * an array of more than one object even though it is declared as a single one.
     * TODO: dynamic sizing of the heap object based on slab configuration.
     *       Just allocate the maximum number of slabsets for now.
     */
    slabset slaballoc[maxslabsize>>2];

};
       
inline RNewAllocator::RNewAllocator()
    {}

/**
@return The maximum length to which the heap can grow.

@publishedAll
@released
*/
inline TInt RNewAllocator::MaxLength() const
    {return iMaxLength;}

inline void RNewAllocator::operator delete(TAny*, TAny*)
/**
Called if the constructor invoked by operator new(TUint aSize, TAny* aBase) throws an exception.
This is a dummy implementation, as the corresponding operator new does not allocate memory.
*/
    {}


inline TUint8* RNewAllocator::Base() const
/**
Gets a pointer to the start of the heap.

Note that because of the small space overhead incurred by all allocated cells,
no cell will have the same address as that returned by this function.

@return A pointer to the base of the heap.
*/
    {return iBase;}


inline TInt RNewAllocator::Align(TInt a) const
/**
@internalComponent
*/
    {return _ALIGN_UP(a, iAlign);}
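// For example, with iAlign == 8, Align(13) returns 16: _ALIGN_UP rounds its first
// argument up to the next multiple of the (power-of-two) alignment.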
       


inline const TAny* RNewAllocator::Align(const TAny* a) const
/**
@internalComponent
*/
    {return (const TAny*)_ALIGN_UP((TLinAddr)a, iAlign);}


inline void RNewAllocator::Lock() const
/**
@internalComponent
*/
    {((RFastLock&)iLock).Wait();}


inline void RNewAllocator::Unlock() const
/**
@internalComponent
*/
    {((RFastLock&)iLock).Signal();}


inline TInt RNewAllocator::ChunkHandle() const
/**
@internalComponent
*/
    {
    return iChunkHandle;
    }

#endif // NEWALLOCATOR_H