|
1 /* |
|
2 * Copyright (c) 2006 Nokia Corporation and/or its subsidiary(-ies). |
|
3 * All rights reserved. |
|
4 * This component and the accompanying materials are made available |
|
5 * under the terms of the License "Eclipse Public License v1.0" |
|
6 * which accompanies this distribution, and is available |
|
7 * at the URL "http://www.eclipse.org/legal/epl-v10.html". |
|
8 * |
|
9 * Initial Contributors: |
|
10 * Nokia Corporation - initial contribution. |
|
11 * |
|
12 * Contributors: |
|
13 * |
|
14 * Description: |
|
15 * |
|
16 * |
|
17 */ |
|
18 |
|
19 #ifndef SYMBIANDLHEAP_H_ |
|
20 #define SYMBIANDLHEAP_H_ |
|
21 |
|
22 /** |
|
23 @publishedAll |
|
24 @released |
|
25 |
|
26 Represents the default implementation for a heap. |
|
27 |
|
28 The default implementation uses an address-ordered first fit type algorithm. |
|
29 |
|
30 The heap itself is contained in a chunk and may be the only occupant of the |
|
31 chunk or may share the chunk with the program stack. |
|
32 |
|
33 The class contains member functions for allocating, adjusting, freeing individual |
|
34 cells and generally managing the heap. |
|
35 |
|
36 The class is not a handle in the same sense that RChunk is a handle; i.e. |
|
37 there is no Kernel object which corresponds to the heap. |
|
38 */ |
|
39 #include "DLA.h" |
|
40 |
|
41 #undef UIMPORT_C |
|
42 #define UIMPORT_C |
|
43 |
|
NONSHARABLE_CLASS(RSymbianDLHeap) : public RAllocator
	{
public:
	// Size in bytes of the header prepended to every allocated cell.
	enum{EAllocCellSize = 8};
#ifndef __MANUALLY_INCLUDED_RSymbianDLHeap_CPP__
	// Normal build: the RAllocator interface is exported (UIMPORT_C is
	// redefined to nothing just above this class, see the #define).
	UIMPORT_C virtual TAny* Alloc(TInt aSize);
	UIMPORT_C virtual void Free(TAny* aPtr);
	UIMPORT_C virtual TAny* ReAlloc(TAny* aPtr, TInt aSize, TInt aMode=0);
	UIMPORT_C virtual TInt AllocLen(const TAny* aCell) const;
	UIMPORT_C virtual TInt Compress();
	UIMPORT_C virtual void Reset();
	UIMPORT_C virtual TInt AllocSize(TInt& aTotalAllocSize) const;
	UIMPORT_C virtual TInt Available(TInt& aBiggestBlock) const;
	UIMPORT_C virtual TInt DebugFunction(TInt aFunc, TAny* a1=NULL, TAny* a2=NULL);

protected:
	UIMPORT_C virtual TInt Extension_(TUint aExtensionId, TAny*& a0, TAny* a1);
#else
	// Same interface without export decoration, for builds that #include
	// the implementation (.cpp) directly into another translation unit.
	virtual TAny* Alloc(TInt aSize);
	virtual void Free(TAny* aPtr);
	virtual TAny* ReAlloc(TAny* aPtr, TInt aSize, TInt aMode=0);
	virtual TInt AllocLen(const TAny* aCell) const;
	virtual TInt Compress();
	virtual void Reset();
	virtual TInt AllocSize(TInt& aTotalAllocSize) const;
	virtual TInt Available(TInt& aBiggestBlock) const;
	virtual TInt DebugFunction(TInt aFunc, TAny* a1=NULL, TAny* a2=NULL);
protected:
	virtual TInt Extension_(TUint aExtensionId, TAny*& a0, TAny* a1);
#endif

public:
	// Current committed size of the hosting chunk, in bytes.
	TInt Size() const
	{ return iChunkSize; }

	inline TInt MaxLength() const;
	inline TUint8* Base() const;
	inline TInt Align(TInt a) const;
	inline const TAny* Align(const TAny* a) const;
	//inline TBool IsLastCell(const SCell* aCell) const;
	inline void Lock() const;
	inline void Unlock() const;
	inline TInt ChunkHandle() const;

	/**
	Helper type whose layout yields the platform's strictest natural
	alignment (see ECellAlignment below).
	@internalComponent
	*/
	struct _s_align {char c; double d;};

	/**
	The structure of a heap cell header for a heap cell on the free list.
	*/
	struct SCell {
		/**
		The length of the cell, which includes the length of
		this header.
		*/
		TInt len;


		/**
		A pointer to the next cell in the free list.
		*/
		SCell* next;
	};

	/**
	The default cell alignment.
	*/
	enum {ECellAlignment = sizeof(_s_align)-sizeof(double)};

	/**
	Size of a free cell header.
	*/
	enum {EFreeCellSize = sizeof(SCell)};

	/**
	Denominators used when deciding whether to shrink the heap.
	@internalComponent
	*/
	enum TDefaultShrinkRatios {EShrinkRatio1=256, EShrinkRatioDflt=512};

public:
	UIMPORT_C RSymbianDLHeap(TInt aMaxLength, TInt aAlign=0, TBool aSingleThread=ETrue);
	UIMPORT_C RSymbianDLHeap(TInt aChunkHandle, TInt aOffset, TInt aMinLength, TInt aMaxLength, TInt aGrowBy, TInt aAlign=0, TBool aSingleThread=EFalse);
	inline RSymbianDLHeap();

	// Placement new: constructs the heap object in caller-supplied storage;
	// allocates nothing itself (hence the no-op placement delete below).
	TAny* operator new(TUint aSize, TAny* aBase) __NO_THROW;
	inline void operator delete(TAny*, TAny*);

protected:
	// Maps a client cell pointer back to its SCell header.
	UIMPORT_C SCell* GetAddress(const TAny* aCell) const;

public:
	TInt iMinLength;
	TInt iMaxLength;	// maximum bytes used by the allocator in total
	TInt iOffset;		// offset of RSymbianDLHeap object from chunk base
	TInt iGrowBy;

	TInt iChunkHandle;	// handle of chunk
	RFastLock iLock;
	TUint8* iBase;		// bottom of DL memory, i.e. this+sizeof(RSymbianDLHeap)
	TUint8* iTop;		// top of DL memory (page aligned)
	TInt iAlign;
	TInt iMinCell;
	TInt iPageSize;
	SCell iFree;
protected:
	// Simulated-failure state used by the debug/test facilities.
	TInt iNestingLevel;
	TInt iAllocCount;
	TAllocFail iFailType;
	TInt iFailRate;
	TBool iFailed;
	TInt iFailAllocCount;
	TInt iRand;
	TAny* iTestData;
protected:
	TInt iChunkSize;	// currently allocated bytes in the chunk (== chunk.Size())
	malloc_state iGlobalMallocState;
	malloc_params mparams;
private:
	// --- Doug Lea (DL) allocator internals; types come from DLA.h ---
	void Init(TInt aBitmapSlab, TInt aPagePower, size_t aTrimThreshold);/*Init internal data structures*/
	inline int init_mparams(size_t aTrimThreshold /*= DEFAULT_TRIM_THRESHOLD*/);
	inline void init_bins(mstate m);
	inline void init_top(mstate m, mchunkptr p, size_t psize);
	void* sys_alloc(mstate m, size_t nb);
	msegmentptr segment_holding(mstate m, TUint8* addr);
	void add_segment(mstate m, TUint8* tbase, size_t tsize, flag_t mmapped);
	int sys_trim(mstate m, size_t pad);
	int has_segment_link(mstate m, msegmentptr ss);
	size_t release_unused_segments(mstate m);
	void* mmap_alloc(mstate m, size_t nb);/*Need to check this function*/
	void* prepend_alloc(mstate m, TUint8* newbase, TUint8* oldbase, size_t nb);
	void* tmalloc_large(mstate m, size_t nb);
	void* tmalloc_small(mstate m, size_t nb);
	/* Functions converted from the original dlmalloc MACROS -- begin */
	static inline void unlink_first_small_chunk(mstate M,mchunkptr B,mchunkptr P,bindex_t& I);
	static inline void insert_small_chunk(mstate M,mchunkptr P, size_t S);
	static inline void insert_chunk(mstate M,mchunkptr P,size_t S);
	static inline void unlink_large_chunk(mstate M,tchunkptr X);
	static inline void unlink_small_chunk(mstate M, mchunkptr P,size_t S);
	static inline void unlink_chunk(mstate M, mchunkptr P, size_t S);
	static inline void compute_tree_index(size_t S, bindex_t& I);
	static inline void insert_large_chunk(mstate M,tchunkptr X,size_t S);
	static inline void replace_dv(mstate M, mchunkptr P, size_t S);
	static inline void compute_bit2idx(binmap_t X,bindex_t& I);
	/* Functions converted from the original dlmalloc MACROS -- end */
	TAny* SetBrk(TInt32 aDelta);
	void* internal_realloc(mstate m, void* oldmem, size_t bytes);
	void internal_malloc_stats(mstate m);
	int change_mparam(int param_number, int value);
#if !NO_MALLINFO
	mallinfo internal_mallinfo(mstate m);
#endif
	void Init_Dlmalloc(size_t capacity, int locked, size_t aTrimThreshold);
	void* dlmalloc(size_t);
	void dlfree(void*);
	void* dlrealloc(void*, size_t);
	int dlmallopt(int, int);
	size_t dlmalloc_footprint(void);
	size_t dlmalloc_max_footprint(void);
#if !NO_MALLINFO
	struct mallinfo dlmallinfo(void);
#endif
	int dlmalloc_trim(size_t);
	size_t dlmalloc_usable_size(void*);
	void dlmalloc_stats(void);
	inline mchunkptr mmap_resize(mstate m, mchunkptr oldp, size_t nb);

	/**************************** Code added for the DL heap ****************/
	friend class CNewSymbianHeapPool;
private:
	// --- Slab allocator state (types slab/page/slabset come from DLA.h) ---
	unsigned short slab_threshold;	// sizes up to this go to the slab allocator -- TODO confirm against .cpp
	unsigned short page_threshold;	// 2^n is smallest cell size allocated in paged allocator
	slab* partial_page;		// partial-use page tree
	page* spare_page;		// single empty page cached
	unsigned char sizemap[(maxslabsize>>2)+1];	// index of slabset based on size class
private:
	static void tree_remove(slab* s);
	static void tree_insert(slab* s,slab** r);
public:
	// Bitmap with one bit set per valid slab size class (size classes step by 4).
	enum {okbits = (1<<(maxslabsize>>2))-1};
	void slab_init(unsigned slabbitmap);
	void* slab_allocate(slabset& allocator);
	void slab_free(void* p);
	void* allocnewslab(slabset& allocator);
	void* allocnewpage(slabset& allocator);
	void freeslab(slab* s);
	void freepage(page* p);
	void* map(void* p,unsigned sz);
	void* remap(void* p,unsigned oldsz,unsigned sz);
	void unmap(void* p,unsigned sz);
	/** TODO: consider moving these slab-header accessors into the slab allocator class. */
	// Each accessor extracts one bit-field from a packed 32-bit slab header word.
	static inline unsigned header_free(unsigned h)
	{return (h&0x000000ff);}
	static inline unsigned header_pagemap(unsigned h)
	{return (h&0x00000f00)>>8;}
	static inline unsigned header_size(unsigned h)
	{return (h&0x0003f000)>>12;}
	static inline unsigned header_usedm4(unsigned h)
	{return (h&0x0ffc0000)>>18;}
	/*** Paged allocator: handles allocations above page_threshold ***/
	void paged_init(unsigned pagepower);
	void* paged_allocate(unsigned size);
	void paged_free(void* p);
	void* paged_reallocate(void* p, unsigned size);
	pagecell* paged_descriptor(const void* p) const ;
private:
	// paged allocator structures
	enum {npagecells=4};
	pagecell pagelist[npagecells];	// descriptors for page-aligned large allocations
	/**
	* Always keep this member at the bottom of the class: although it is
	* declared as a fixed array, it is used as an array of more than one
	* object (i.e. it may be indexed past its declared extent).
	* TODO: dynamic sizing of the heap object based on slab configuration.
	* Just allocate the maximum number of slabsets for now.
	*/
	slabset slaballoc[15];
	};
|
262 |
|
263 |
|
264 inline void RSymbianDLHeap::Lock() const |
|
265 /** |
|
266 @internalComponent |
|
267 */ |
|
268 {((RFastLock&)iLock).Wait();} |
|
269 |
|
270 |
|
271 |
|
272 |
|
273 inline void RSymbianDLHeap::Unlock() const |
|
274 /** |
|
275 @internalComponent |
|
276 */ |
|
277 {((RFastLock&)iLock).Signal();} |
|
278 |
|
279 |
|
280 inline TInt RSymbianDLHeap::ChunkHandle() const |
|
281 /** |
|
282 @internalComponent |
|
283 */ |
|
284 { |
|
285 return iChunkHandle; |
|
286 } |
|
287 |
|
inline void RSymbianDLHeap::operator delete(TAny*, TAny*)
/**
Placement delete, invoked by the compiler only if a constructor called via
operator new(TUint aSize, TAny* aBase) throws an exception.
Deliberately a no-op: the matching placement new does not allocate memory,
so there is nothing to release here.
*/
	{}
|
294 |
|
295 #endif /* SYMBIANDLHEAP_H_ */ |