|
/*
 * Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
 * All rights reserved.
 * This component and the accompanying materials are made available
 * under the terms of "Eclipse Public License v1.0"
 * which accompanies this distribution, and is available
 * at the URL "http://www.eclipse.org/legal/epl-v10.html".
 *
 * Initial Contributors:
 * Nokia Corporation - initial contribution.
 *
 * Contributors:
 *
 * Description:
 *
 */
|
17 #ifndef NEWALLOCATOR_H |
|
18 #define NEWALLOCATOR_H |
|
19 |
|
/**
A hybrid heap allocator implementing the RAllocator interface.

Judging by its members, it combines three allocation strategies:
a Doug Lea (dlmalloc) style allocator (malloc_state, tmalloc_*,
sys_alloc, ...), a slab allocator for small cells (slabset, slab_*),
and a page-aligned allocator for large requests (paged_*, pagecell).
NOTE(review): which size classes route to which strategy is decided in
the implementation (newallocator.cpp) — confirm there.
*/
class RNewAllocator : public RAllocator
    {
public:
    // Per-cell overhead, in bytes, of an allocated cell header —
    // presumably; confirm against the implementation.
    enum{EAllocCellSize = 8};

    // Standard RAllocator entry points (allocation, release, resize,
    // introspection). Implemented in newallocator.cpp.
    virtual TAny* Alloc(TInt aSize);
    virtual void Free(TAny* aPtr);
    virtual TAny* ReAlloc(TAny* aPtr, TInt aSize, TInt aMode=0);
    virtual TInt AllocLen(const TAny* aCell) const;
    virtual TInt Compress();
    virtual void Reset();
    virtual TInt AllocSize(TInt& aTotalAllocSize) const;
    virtual TInt Available(TInt& aBiggestBlock) const;
    virtual TInt DebugFunction(TInt aFunc, TAny* a1=NULL, TAny* a2=NULL);
protected:
    // RAllocator extension hook.
    virtual TInt Extension_(TUint aExtensionId, TAny*& a0, TAny* a1);

public:
    // Number of bytes currently committed to the heap's chunk.
    TInt Size() const
        { return iChunkSize; }

    // Inline accessors/helpers; bodies are in newallocator.inl.
    inline TInt MaxLength() const;
    inline TUint8* Base() const;
    inline TInt Align(TInt a) const;
    inline const TAny* Align(const TAny* a) const;
    inline void Lock() const;
    inline void Unlock() const;
    inline TInt ChunkHandle() const;

    /**
    Helper type used only to compute the platform's strictest
    fundamental alignment (see ECellAlignment below).
    @internalComponent
    */
    struct _s_align {char c; double d;};

    /**
    The structure of a heap cell header for a heap cell on the free list.
    */
    struct SCell {
                 /**
                 The length of the cell, which includes the length of
                 this header.
                 */
                 TInt len;

                 /**
                 A pointer to the next cell in the free list.
                 */
                 SCell* next;
                 };

    /**
    The default cell alignment.
    */
    enum {ECellAlignment = sizeof(_s_align)-sizeof(double)};

    /**
    Size of a free cell header.
    */
    enum {EFreeCellSize = sizeof(SCell)};

    /**
    Ratios controlling when the heap shrinks back to the chunk —
    presumably; verify usage in the implementation.
    @internalComponent
    */
    enum TDefaultShrinkRatios {EShrinkRatio1=256, EShrinkRatioDflt=512};

public:
    // Fixed-size (non-growing) heap constructor.
    RNewAllocator(TInt aMaxLength, TInt aAlign=0, TBool aSingleThread=ETrue);
    // Chunk-backed, growable heap constructor.
    RNewAllocator(TInt aChunkHandle, TInt aOffset, TInt aMinLength, TInt aMaxLength, TInt aGrowBy, TInt aAlign=0, TBool aSingleThread=EFalse);
    inline RNewAllocator();

    // Placement new/delete: the allocator object is constructed in place
    // at the base of its own chunk.
    TAny* operator new(TUint aSize, TAny* aBase) __NO_THROW;
    inline void operator delete(TAny*, TAny*);

protected:
    // Maps a client cell pointer back to its SCell header.
    SCell* GetAddress(const TAny* aCell) const;

public:
    TInt iMinLength;
    TInt iMaxLength;        // maximum bytes used by the allocator in total
    TInt iOffset;           // offset of RNewAllocator object from chunk base
    TInt iGrowBy;

    TInt iChunkHandle;      // handle of chunk
    RFastLock iLock;        // guards the heap in multi-threaded use
    TUint8* iBase;          // bottom of DL memory, i.e. this+sizeof(RNewAllocator)
    TUint8* iTop;           // top of DL memory (page aligned)
    TInt iAlign;
    TInt iMinCell;
    TInt iPageSize;
    SCell iFree;            // head of the free-cell list
protected:
    // Debug/test support: simulated allocation-failure state used by
    // DebugFunction (RAllocator heap-failure testing).
    TInt iNestingLevel;
    TInt iAllocCount;
    TAllocFail iFailType;
    TInt iFailRate;
    TBool iFailed;
    TInt iFailAllocCount;
    TInt iRand;
    TAny* iTestData;
protected:
    TInt iChunkSize;        // currently allocated bytes in the chunk (== chunk.Size())
    malloc_state iGlobalMallocState;    // dlmalloc global state
    malloc_params mparams;              // dlmalloc tuning parameters
private:
    // --- dlmalloc machinery (ported from Doug Lea's allocator) ---
    void Init(TInt aBitmapSlab, TInt aPagePower, size_t aTrimThreshold);/*Init internal data structures*/
    inline int init_mparams(size_t aTrimThreshold /*= DEFAULT_TRIM_THRESHOLD*/);
    inline void init_bins(mstate m);
    inline void init_top(mstate m, mchunkptr p, size_t psize);
    void* sys_alloc(mstate m, size_t nb);
    msegmentptr segment_holding(mstate m, TUint8* addr);
    void add_segment(mstate m, TUint8* tbase, size_t tsize, flag_t mmapped);
    int sys_trim(mstate m, size_t pad);
    int has_segment_link(mstate m, msegmentptr ss);
    size_t release_unused_segments(mstate m);
    void* mmap_alloc(mstate m, size_t nb);/*Need to check this function*/
    void* prepend_alloc(mstate m, TUint8* newbase, TUint8* oldbase, size_t nb);
    void* tmalloc_large(mstate m, size_t nb);
    void* tmalloc_small(mstate m, size_t nb);
    /* dlmalloc macros converted to functions */
    static inline void unlink_first_small_chunk(mstate M,mchunkptr B,mchunkptr P,bindex_t& I);
    static inline void insert_small_chunk(mstate M,mchunkptr P, size_t S);
    static inline void insert_chunk(mstate M,mchunkptr P,size_t S);
    static inline void unlink_large_chunk(mstate M,tchunkptr X);
    static inline void unlink_small_chunk(mstate M, mchunkptr P,size_t S);
    static inline void unlink_chunk(mstate M, mchunkptr P, size_t S);
    static inline void compute_tree_index(size_t S, bindex_t& I);
    static inline void insert_large_chunk(mstate M,tchunkptr X,size_t S);
    static inline void replace_dv(mstate M, mchunkptr P, size_t S);
    static inline void compute_bit2idx(binmap_t X,bindex_t& I);
    /* end of converted macros */
    TAny* SetBrk(TInt32 aDelta);
    void* internal_realloc(mstate m, void* oldmem, size_t bytes);
    void internal_malloc_stats(mstate m);
    int change_mparam(int param_number, int value);
#if !NO_MALLINFO
    mallinfo internal_mallinfo(mstate m);
#endif
    void Init_Dlmalloc(size_t capacity, int locked, size_t aTrimThreshold);
    void* dlmalloc(size_t);
    void dlfree(void*);
    void* dlrealloc(void*, size_t);
    int dlmallopt(int, int);
    size_t dlmalloc_footprint(void);
    size_t dlmalloc_max_footprint(void);
#if !NO_MALLINFO
    struct mallinfo dlmallinfo(void);
#endif
    int dlmalloc_trim(size_t);
    size_t dlmalloc_usable_size(void*);
    void dlmalloc_stats(void);
    inline mchunkptr mmap_resize(mstate m, mchunkptr oldp, size_t nb);

    /****************************Code Added For DL heap**********************/
    friend class UserHeap;
private:
    // --- slab allocator state ---
    unsigned short slab_threshold;      // presumably the max size served by slabs — confirm
    unsigned short page_threshold;      // 2^n is smallest cell size allocated in paged allocator
    unsigned slab_init_threshold;
    unsigned slab_config_bits;
    slab* partial_page;                 // partial-use page tree
    page* spare_page;                   // single empty page cached
    unsigned char sizemap[(maxslabsize>>2)+1];  // index of slabset based on size class
private:
    // Maintenance of the slab red-black/partial tree rooted at *r.
    static void tree_remove(slab* s);
    static void tree_insert(slab* s,slab** r);
public:
    // Bitmask of all configurable slab size classes.
    enum {okbits = (1<<(maxslabsize>>2))-1};
    void slab_init();
    void slab_config(unsigned slabbitmap);
    void* slab_allocate(slabset& allocator);
    void slab_free(void* p);
    void* allocnewslab(slabset& allocator);
    void* allocnewpage(slabset& allocator);
    void* initnewslab(slabset& allocator, slab* s);
    void freeslab(slab* s);
    void freepage(page* p);
    // Low-level commit/decommit of address ranges backing slabs/pages.
    void* map(void* p,unsigned sz);
    void* remap(void* p,unsigned oldsz,unsigned sz);
    void unmap(void* p,unsigned sz);
    /* Bit-field decoders for the packed slab header word.
       NOTE(review): candidates for relocation into the slab allocator class. */
    static inline unsigned header_free(unsigned h)
    {return (h&0x000000ff);}
    static inline unsigned header_pagemap(unsigned h)
    {return (h&0x00000f00)>>8;}
    static inline unsigned header_size(unsigned h)
    {return (h&0x0003f000)>>12;}
    static inline unsigned header_usedm4(unsigned h)
    {return (h&0x0ffc0000)>>18;}
    /***paged allocator code***/
    void paged_init(unsigned pagepower);
    void* paged_allocate(unsigned size);
    void paged_free(void* p);
    void* paged_reallocate(void* p, unsigned size);
    pagecell* paged_descriptor(const void* p) const ;
private:
    // paged allocator structures
    enum {npagecells=4};
    pagecell pagelist[npagecells];      // descriptors for page-aligned large allocations
    TAny* DLReAllocImpl(TAny* aPtr, TInt aSize);
    // to track maximum used
    //TInt iHighWaterMark;

    slabset slaballoc[maxslabsize>>2];  // one slabset per small size class

private:
    // Factory/setup entry points; private and exposed via friend UserHeap.
    static RNewAllocator* FixedHeap(TAny* aBase, TInt aMaxLength, TInt aAlign, TBool aSingleThread);
    static RNewAllocator* ChunkHeap(const TDesC* aName, TInt aMinLength, TInt aMaxLength, TInt aGrowBy, TInt aAlign, TBool aSingleThread);
    static RNewAllocator* ChunkHeap(RChunk aChunk, TInt aMinLength, TInt aGrowBy, TInt aMaxLength, TInt aAlign, TBool aSingleThread, TUint32 aMode);
    static RNewAllocator* OffsetChunkHeap(RChunk aChunk, TInt aMinLength, TInt aOffset, TInt aGrowBy, TInt aMaxLength, TInt aAlign, TBool aSingleThread, TUint32 aMode);
    static TInt CreateThreadHeap(SStdEpocThreadCreateInfo& aInfo, RNewAllocator*& aHeap, TInt aAlign = 0, TBool aSingleThread = EFalse);
    };
|
232 |
|
233 #include "newallocator.inl" |
|
234 |
|
235 #endif // NEWALLOCATOR_H |