kernel/eka/include/heap_hybrid.h
       
// Copyright (c) 1994-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// kernel\eka\include\heap_hybrid.h
//
// Uses malloc (aka dlmalloc) written by Doug Lea version 2.8.4
//
       
#ifndef __HEAP_HYBRID_H__
#define __HEAP_HYBRID_H__

#include <e32cmn.h>

#ifdef __WINS__
#define USE_HYBRID_HEAP 0
#else
#define USE_HYBRID_HEAP 1
#endif
       
// The following declarations are temporary; they avoid having to include dla.h
// from heap_hybrid.h, which causes problems due to its definition of size_t
// (and possibly other types).  This is unfortunate, but we cannot pollute the
// namespace with these types without causing problems for Open C and other
// POSIX compatibility efforts in Symbian.
       
#define NSMALLBINS        (32U)
#define NTREEBINS         (32U)

#ifndef MALLOC_ALIGNMENT
	#define MALLOC_ALIGNMENT ((TUint)8U)
#endif  /* MALLOC_ALIGNMENT */

#define CHUNK_OVERHEAD      (sizeof(TUint))
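
// Background note (dlmalloc 2.8.4, illustrative): with footers disabled, each
// in-use chunk carries a single size/status word of overhead, hence
// CHUNK_OVERHEAD is one TUint.  The 32 small bins hold chunks at 8-byte
// granularity (bin index = size >> 3); larger chunks live in the 32 tree
// bins, each of which spans a range of sizes.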
       
typedef unsigned int bindex_t;
typedef unsigned int binmap_t;
typedef struct malloc_chunk* mchunkptr;
typedef struct malloc_segment msegment;
typedef struct malloc_state* mstate;
typedef struct malloc_tree_chunk* tbinptr;
typedef struct malloc_tree_chunk* tchunkptr;

struct malloc_segment {
  TUint8*	iBase;             /* base address */
  TUint		iSize;             /* allocated size */
};
       
struct malloc_state {
	binmap_t	iSmallMap;
	binmap_t	iTreeMap;
	TUint		iDvSize;
	TUint		iTopSize;
	mchunkptr	iDv;
	mchunkptr	iTop;
	TUint		iTrimCheck;
	mchunkptr	iSmallBins[(NSMALLBINS+1)*2];
	tbinptr		iTreeBins[NTREEBINS];
	msegment	iSeg;
	};
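
// Illustrative note: iDv is dlmalloc's "designated victim" chunk, preferred
// for servicing small requests; iTop is the topmost free chunk bordering the
// end of committed memory; iSmallMap and iTreeMap record which bins are
// currently non-empty.  These fields mirror standard dlmalloc 2.8.4 state.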
       
class RHybridHeap : public RHeap
	{

public:

	struct HeapInfo
		{
	    unsigned iFootprint;	// total heap footprint in bytes
	    unsigned iMaxSize;		// maximum size the heap may grow to
	    unsigned iAllocBytes;	// total bytes in allocated cells
	    unsigned iAllocN;		// number of allocated cells
	    unsigned iFreeBytes;	// total bytes in free cells
	    unsigned iFreeN;		// number of free cells
		};

	struct SHeapCellInfo
		{
		RHybridHeap* iHeap;
		TInt iTotalAlloc;
		TInt iTotalAllocSize;
		TInt iTotalFree;
		TInt iLevelAlloc;
		SDebugCell* iStranded;
		};
       
	/**
    @internalComponent
    */
	enum TAllocatorType
		{
		ESlabAllocator,
		EDougLeaAllocator,
		EPageAllocator,
		EFullSlab=0x80,
		EPartialFullSlab=0x40,
		EEmptySlab=0x20,
		ESlabSpare=0x10,
		ESlabMask=0xf0
		};

	/**
    @internalComponent
    */
	struct SWalkInfo {
		/**
		Address of the walk function to be called
		*/
		TWalkFunc iFunction;

		/**
		The first parameter passed to the callback function
		*/
		TAny* iParam;

		/**
		Pointer to the owning RHybridHeap object
		*/
		RHybridHeap* iHeap;
		};
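
	// Illustrative note: the TWalkFunc callback is invoked once per visited
	// cell with (iParam, cell type, cell address, cell length);
	// WalkCheckCell() declared below has the matching signature.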
       
	/**
    @internalComponent
    */
	struct SConfig {
		/**
		Required slab configuration: bit n enables slab size 4*(n+1)
		(bit 0 = 4 bytes, bit 1 = 8 bytes, ... bit 13 = 56 bytes)
		*/
		TUint32 iSlabBits;
		/**
		Delayed slab threshold in bytes (0 = no threshold)
		*/
		TInt iDelayedSlabThreshold;
		/**
		2^n is the smallest size allocated by the paged allocator
		(valid range 14-31, i.e. 16KB and upwards)
		*/
		TInt iPagePower;
		};
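
	// Example (illustrative, hypothetical values): enable 8-, 16- and 24-byte
	// slabs (bits 1, 3 and 5) and start the page allocator at 64KB (2^16):
	//
	//   RHybridHeap::SConfig config;
	//   config.iSlabBits = (1u << 1) | (1u << 3) | (1u << 5);
	//   config.iDelayedSlabThreshold = 0;	// slabs used from the start
	//   config.iPagePower = 16;			// 2^16 = 64KB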
       
	/**
	@internalComponent

	This structure is used by test code for configuring the allocators and
	obtaining information from them, in order to ensure they are behaving as
	required.  This is internal, test-specific code and is liable to be
	changed without warning at any time.  You should under no circumstances
	be using it!
	*/
	struct STestCommand
		{
		TInt	iCommand;			// The test-related command to be executed

		union
			{
			SConfig		iConfig;	// Configuration used by test code only
			TAny*		iData;		// Extra supporting data for the test command
			};
		};

	/**
	@internalComponent

	Commands used by test code for configuring the allocators and obtaining
	information from them
	*/
	enum TTestCommand { EGetConfig, ESetConfig, EHeapMetaData, ETestData };
       
	virtual TAny* Alloc(TInt aSize);
	virtual void Free(TAny* aPtr);
	virtual TAny* ReAlloc(TAny* aPtr, TInt aSize, TInt aMode=0);
	virtual TInt AllocLen(const TAny* aCell) const;
#ifndef __KERNEL_MODE__
	virtual TInt Compress();
	virtual void Reset();
	virtual TInt AllocSize(TInt& aTotalAllocSize) const;
	virtual TInt Available(TInt& aBiggestBlock) const;
#endif
	virtual TInt DebugFunction(TInt aFunc, TAny* a1=NULL, TAny* a2=NULL);
protected:
	virtual TInt Extension_(TUint aExtensionId, TAny*& a0, TAny* a1);

public:
	TAny* operator new(TUint aSize, TAny* aBase) __NO_THROW;
	void operator delete(TAny*, TAny*);
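
	// Usage sketch (illustrative): an RHybridHeap is constructed by placement
	// new into memory the caller has already committed, then used through the
	// normal RAllocator/RHeap interface:
	//
	//   RHybridHeap* heap = new(heapMemory) RHybridHeap(/*...*/);	// heapMemory: caller-supplied
	//   TAny* cell = heap->Alloc(64);
	//   heap->Free(cell);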
       
private:
	TInt DoCountAllocFree(TInt& aFree);
	TInt DoCheckHeap(SCheckInfo* aInfo);
	void DoMarkStart();
	TUint32 DoMarkEnd(TInt aExpected);
	void DoSetAllocFail(TAllocFail aType, TInt aRate);
	TBool CheckForSimulatedAllocFail();
	void DoSetAllocFail(TAllocFail aType, TInt aRate, TUint aBurst);

	void Lock() const;
	void Unlock() const;
	TInt ChunkHandle() const;

	RHybridHeap(TInt aChunkHandle, TInt aOffset, TInt aMinLength, TInt aMaxLength, TInt aGrowBy, TInt aAlign, TBool aSingleThread, TBool aDlOnly, TBool aUseAdjust);
	RHybridHeap(TInt aMaxLength, TInt aAlign=0, TBool aSingleThread=ETrue);
	RHybridHeap();
       
	void Init(TInt aBitmapSlab, TInt aPagePower);
	inline void InitBins(mstate m);
	inline void InitTop(mstate m, mchunkptr p, TUint psize);
	void* SysAlloc(mstate m, TUint nb);
	int SysTrim(mstate m, TUint pad);
	void* TmallocLarge(mstate m, TUint nb);
	void* TmallocSmall(mstate m, TUint nb);
       
	/* Functions converted from dlmalloc macros */
       
	static inline void UnlinkFirstSmallChunk(mstate M, mchunkptr B, mchunkptr P, bindex_t& I);
	static inline void InsertSmallChunk(mstate M, mchunkptr P, TUint S);
	static inline void InsertChunk(mstate M, mchunkptr P, TUint S);
	static inline void UnlinkLargeChunk(mstate M, tchunkptr X);
	static inline void UnlinkSmallChunk(mstate M, mchunkptr P, TUint S);
	static inline void UnlinkChunk(mstate M, mchunkptr P, TUint S);
	static inline void ComputeTreeIndex(TUint S, bindex_t& I);
	static inline void InsertLargeChunk(mstate M, tchunkptr X, TUint S);
	static inline void ReplaceDv(mstate M, mchunkptr P, TUint S);
	static inline void ComputeBit2idx(binmap_t X, bindex_t& I);

	void DoComputeTreeIndex(TUint S, bindex_t& I);
	void DoCheckAnyChunk(mstate m, mchunkptr p);
	void DoCheckTopChunk(mstate m, mchunkptr p);
	void DoCheckInuseChunk(mstate m, mchunkptr p);
	void DoCheckFreeChunk(mstate m, mchunkptr p);
	void DoCheckMallocedChunk(mstate m, void* mem, TUint s);
	void DoCheckTree(mstate m, tchunkptr t);
	void DoCheckTreebin(mstate m, bindex_t i);
	void DoCheckSmallbin(mstate m, bindex_t i);
	TInt BinFind(mstate m, mchunkptr x);
	TUint TraverseAndCheck(mstate m);
	void DoCheckMallocState(mstate m);
       
	TInt GetInfo(struct HeapInfo* i, SWalkInfo* wi=NULL) const;
	void InitDlMalloc(TUint capacity, int locked);
	void* DlMalloc(TUint);
	void DlFree(void*);
	void* DlRealloc(void*, TUint, TInt);
	TUint DlInfo(struct HeapInfo* i, SWalkInfo* wi) const;
	void DoCheckCommittedSize(TInt aNPages, mstate aM);

	TAny* ReAllocImpl(TAny* aPtr, TInt aSize, TInt aMode);
	void Construct(TBool aSingleThread, TBool aDLOnly, TBool aUseAdjust, TInt aAlign);
#ifndef __KERNEL_MODE__
	TInt ConstructLock(TUint32 aMode);
#endif
	static void Walk(SWalkInfo* aInfo, TAny* aBfr, TInt aLth, TCellType aBfrType, TAllocatorType aAllocatorType);
	static void WalkCheckCell(TAny* aPtr, TCellType aType, TAny* aCell, TInt aLen);
	void* Map(void* p, TInt sz);
	void Unmap(void* p, TInt sz);
       
private:
	TInt iMinLength;
	TInt iOffset;				// offset of RHeap object from chunk base
	TInt iGrowBy;
	TInt iMinCell;
	TInt iPageSize;

	// Temporarily commented out and exported from RHeap to prevent source breaks from req417-52840.
	// This will be moved with another REQ after submission and subsequent fixing of bad code.
	//TInt iNestingLevel;
	TInt iAllocCount;
	// Temporarily commented out.  See comment above regarding req417-52840 source breaks.
	//TAllocFail iFailType;
	TInt iFailRate;
	TBool iFailed;
	TInt iFailAllocCount;
	TInt iRand;
	// Temporarily commented out.  See comment above regarding req417-52840 source breaks.
	//TAny* iTestData;

	TInt iChunkSize;
	TInt iHighWaterMark;
	TBool iUseAdjust;
	TBool iDLOnly;

	malloc_state iGlobalMallocState;
       
#ifdef __KERNEL_MODE__

	friend class RHeapK;

#else

	friend class UserHeap;
	friend class HybridHeap;
	friend class TestHybridHeap;

private:

	static void TreeRemove(slab* s);
	static void TreeInsert(slab* s, slab** r);
       
	// With MAXSLABSIZE == 56 (cf. SConfig::iSlabBits: bit 13 = size 56) this
	// is (1<<14)-1, i.e. one bit per supported 4-byte size class
	enum {EOkBits = (1<<(MAXSLABSIZE>>2))-1};
       
	void SlabInit();
	void SlabConfig(unsigned slabbitmap);
	void* SlabAllocate(slabset& allocator);
	void SlabFree(void* p);
	void* AllocNewSlab(slabset& allocator);
	void* AllocNewPage(slabset& allocator);
	void* InitNewSlab(slabset& allocator, slab* s);
	void FreeSlab(slab* s);
	void FreePage(page* p);
	void SlabInfo(struct HeapInfo* i, SWalkInfo* wi) const;
	static void SlabFullInfo(slab* s, struct HeapInfo* i, SWalkInfo* wi);
	static void SlabPartialInfo(slab* s, struct HeapInfo* i, SWalkInfo* wi);
	static void SlabEmptyInfo(slab* s, struct HeapInfo* i, SWalkInfo* wi);
	static void TreeWalk(slab* const* root, void (*f)(slab*, struct HeapInfo*, SWalkInfo*), struct HeapInfo* i, SWalkInfo* wi);

	static void WalkPartialFullSlab(SWalkInfo* aInfo, slab* aSlab, TCellType aBfrType, TInt aLth);
	static void WalkFullSlab(SWalkInfo* aInfo, slab* aSlab, TCellType aBfrType, TInt aLth);
	void DoCheckSlab(slab* aSlab, TAllocatorType aSlabType, TAny* aBfr=NULL);
	void DoCheckSlabTrees();
	void DoCheckSlabTree(slab** aS, TBool aPartialPage);
	void BuildPartialSlabBitmap(TUint32* aBitmap, slab* aSlab, TAny* aBfr=NULL);
       
	static inline unsigned SlabHeaderFree(unsigned h)
		{return (h&0x000000ff);}
	static inline unsigned SlabHeaderPagemap(unsigned h)
		{return (h&0x00000f00)>>8;}
	static inline unsigned SlabHeaderSize(unsigned h)
		{return (h&0x0003f000)>>12;}
	static inline unsigned SlabHeaderUsedm4(unsigned h)
		{return (h&0x0ffc0000)>>18;}
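
	// Slab header bit layout, as implied by the masks and shifts above
	// (field meanings inferred from the accessor names):
	//   bits  0..7   free cell offset      (SlabHeaderFree)
	//   bits  8..11  page map              (SlabHeaderPagemap)
	//   bits 12..17  cell size             (SlabHeaderSize)
	//   bits 18..27  used count minus 4    (SlabHeaderUsedm4)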
       
	/*** paged allocator code ***/
	void PagedInit(TInt aPagePower);
	void* PagedAllocate(unsigned size);
	void PagedFree(void* p);
	void* PagedReallocate(void* p, unsigned size, TInt mode);

	bool PagedEncode(unsigned pos, unsigned npage);
	unsigned PagedDecode(unsigned pos) const;
	inline unsigned PagedSize(void* p) const;
	inline bool PagedSetSize(void* p, unsigned size);
	inline void PagedZapSize(void* p, unsigned size);
	inline void* Bitmap2addr(unsigned pos) const;
	void PagedInfo(struct HeapInfo* i, SWalkInfo* wi) const;
	void ResetBitmap();
	TBool CheckBitmap(void* aBfr, TInt aSize, TUint32& aDummy, TInt& aNPages);
       
private:
	paged_bitmap iPageMap;							// bitmap representing the page allocator's pages
	TUint8*		iMemBase;							// bottom of paged/slab memory (chunk base)
	TUint8		iBitMapBuffer[MAXSMALLPAGEBITS>>3];	// buffer for the initial page bitmap
	TInt		iSlabThreshold;						// allocations smaller than this are done by the slab allocator
	TInt		iPageThreshold;						// 2^n is the smallest cell size allocated by the paged allocator
	TInt		iSlabInitThreshold;					// the slab allocator is used once the chunk reaches this size
	TUint32		iSlabConfigBits;					// set of bits specifying which slab sizes to use
	slab*		iPartialPage;						// partial-use page tree
	slab*		iFullSlab;							// list of full slabs (so we can find them when walking)
	page*		iSparePage;							// cached, to avoid kernel exec calls for unmapping/remapping
	TUint8		iSizeMap[(MAXSLABSIZE>>2)+1];		// maps each size class to an index into iSlabAlloc
	slabset		iSlabAlloc[MAXSLABSIZE>>2];			// array of slabsets, one per 4-byte size class

#endif // __KERNEL_MODE__
};
       
#define HEAP_ASSERT(x) __ASSERT_DEBUG(x, HEAP_PANIC(ETHeapBadCellAddress))

template <class T> inline T Floor(const T addr, unsigned aln)
	{return T((unsigned(addr))&~(aln-1));}
template <class T> inline T Ceiling(T addr, unsigned aln)
	{return T((unsigned(addr)+(aln-1))&~(aln-1));}
template <class T> inline unsigned LowBits(T addr, unsigned aln)
	{return unsigned(addr)&(aln-1);}
template <class T1, class T2> inline int PtrDiff(const T1* a1, const T2* a2)
	{return reinterpret_cast<const unsigned char*>(a1) - reinterpret_cast<const unsigned char*>(a2);}
template <class T> inline T Offset(T addr, unsigned ofs)
	{return T(unsigned(addr)+ofs);}
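
// Worked examples (illustrative; these helpers assume aln is a power of two):
//   Floor(0x1234, 0x100)   == 0x1200	// clear the bits below the boundary
//   Ceiling(0x1234, 0x100) == 0x1300	// round up to the next boundary
//   LowBits(0x1234, 0x100) == 0x34		// keep only the bits below the boundary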
       
#endif //__HEAP_HYBRID_H__