--- a/kerneltest/e32test/heap/t_heap2.cpp Tue Aug 31 16:34:26 2010 +0300
+++ b/kerneltest/e32test/heap/t_heap2.cpp Wed Sep 01 12:34:56 2010 +0100
@@ -44,10 +44,6 @@
#include <e32hal.h>
#include <e32def.h>
#include <e32def_private.h>
-#include "dla.h"
-#include "slab.h"
-#include "page_alloc.h"
-#include "heap_hybrid.h"
// Needed for KHeapShrinkHysRatio which is now ROM 'patchdata'
#include "TestRHeapShrink.h"
@@ -62,10 +58,6 @@
const TInt KHeapMinCellSize = 0;
#endif
- const TInt KAllocCellSize = (TInt)RHeap::EAllocCellSize;
- const TInt KSizeOfHeap = (TInt)sizeof(RHybridHeap);
-
-
RTest test(_L("T_HEAP2"));
#define TEST_ALIGN(p,a) test((TLinAddr(p)&((a)-1))==0)
@@ -86,6 +78,7 @@
{
TInt i;
TUint32 x = (TUint32)this ^ (TUint32)aLength ^ (TUint32)EMagic;
+ aLength -= RHeap::EAllocCellSize;
if (aLength==0)
return;
iLength = x;
@@ -107,7 +100,8 @@
{
TInt i;
TUint32 x = (TUint32)aInitPtr ^ (TUint32)aInitLength ^ (TUint32)EMagic;
- if ( aLength < (TInt) sizeof(*this) )
+ aLength -= RHeap::EAllocCellSize;
+ if (aLength==0)
return;
test(iLength == x);
aLength /= sizeof(TUint32);
@@ -119,11 +113,26 @@
}
}
-
-
class RTestHeap : public RHeap
{
public:
+ DECL_GET(TInt,AccessCount)
+ DECL_GET(TInt,HandleCount)
+ DECL_GET(TInt*,Handles)
+ DECL_GET(TUint32,Flags)
+ DECL_GET(TInt,CellCount)
+ DECL_GET(TInt,TotalAllocSize)
+ DECL_GET(TInt,MinLength)
+ DECL_GET(TInt,Offset)
+ DECL_GET(TInt,GrowBy)
+ DECL_GET(TInt,ChunkHandle)
+ DECL_GET2(const RFastLock&,Lock,LockRef)
+ DECL_GET(TUint8*,Top)
+ DECL_GET(TInt,Align)
+ DECL_GET(TInt,MinCell)
+ DECL_GET(TInt,PageSize)
+ DECL_GET2(const SCell&,Free,FreeRef)
+public:
TInt CheckAllocatedCell(const TAny* aCell) const;
void FullCheckAllocatedCell(const TAny* aCell) const;
TAny* TestAlloc(TInt aSize);
@@ -131,11 +140,23 @@
TAny* TestReAlloc(TAny* aPtr, TInt aSize, TInt aMode=0);
void FullCheck();
static void WalkFullCheckCell(TAny* aPtr, TCellType aType, TAny* aCell, TInt aLen);
+ TInt FreeCellLen(const TAny* aPtr) const;
+ static RTestHeap* FixedHeap(TInt aMaxLength, TInt aAlign=0, TBool aSingleThread=ETrue);
+ void TakeChunkOwnership(RChunk aChunk);
+ TInt LastFreeCellLen(void) const;
+ TInt CalcComp(TInt aCompSize);
+ void ForceCompress(TInt aFreed);
};
TInt RTestHeap::CheckAllocatedCell(const TAny* aCell) const
{
- TInt len = AllocLen(aCell);
+ SCell* pC = GetAddress(aCell);
+ TInt len = pC->len;
+ TUint8* pEnd = (TUint8*)pC + len;
+ TEST_ALIGN(aCell, iAlign);
+ TEST_ALIGN(len, iAlign);
+ test(len >= iMinCell);
+ test((TUint8*)pC>=iBase && pEnd<=iTop);
return len;
}
@@ -150,7 +171,7 @@
if (p)
{
TInt len = CheckAllocatedCell(p);
- test(len>=aSize);
+ test((len-RHeap::EAllocCellSize)>=aSize);
((STestCell*)p)->Set(len);
}
return p;
@@ -175,7 +196,7 @@
return p;
}
TInt new_len = CheckAllocatedCell(p);
- test(new_len>=aSize);
+ test((new_len-RHeap::EAllocCellSize)>=aSize);
if (p == aPtr)
{
((STestCell*)p)->Verify(p, old_len, Min(old_len, new_len));
@@ -186,8 +207,9 @@
test(!(aMode & ENeverMove));
test((new_len > old_len) || (aMode & EAllowMoveOnShrink));
if (old_len)
- ((STestCell*)p)->Verify(aPtr, old_len, Min(old_len, aSize));
- ((STestCell*)p)->Set(new_len);
+ ((STestCell*)p)->Verify(aPtr, old_len, Min(old_len, new_len));
+ if (new_len != old_len)
+ ((STestCell*)p)->Set(new_len);
return p;
}
@@ -208,15 +230,22 @@
{
case EGoodAllocatedCell:
{
- TInt len = aLen;
- info.iTotalAllocSize += len;
- STestCell* pT = (STestCell*)aCell;
+ test(aCell == info.iNextCell);
+ TInt len = ((SCell*)aCell)->len;
+ test(len == aLen);
+ info.iNextCell += len;
++info.iTotalAlloc;
+ info.iTotalAllocSize += (aLen-EAllocCellSize);
+ STestCell* pT = (STestCell*)((TUint8*)aCell + EAllocCellSize);
pT->Verify(len);
break;
}
case EGoodFreeCell:
{
+ test(aCell == info.iNextCell);
+ TInt len = ((SCell*)aCell)->len;
+ test(len == aLen);
+ info.iNextCell += len;
++info.iTotalFree;
break;
}
@@ -232,13 +261,877 @@
::SHeapCellInfo info;
Mem::FillZ(&info, sizeof(info));
info.iHeap = this;
+ info.iNextCell = iBase;
DebugFunction(EWalk, (TAny*)&WalkFullCheckCell, &info);
- TInt count = AllocSize(iTotalAllocSize);
- test(info.iTotalAlloc == count);
+ test(info.iNextCell == iTop);
+ test(info.iTotalAlloc == iCellCount);
test(info.iTotalAllocSize == iTotalAllocSize);
+ }
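+// Walks the free list looking for a free cell whose payload would start at aPtr;
+// returns that cell's payload length (header excluded), or -1 if there is no such cell.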
+TInt RTestHeap::FreeCellLen(const TAny* aPtr) const
+ {
+ SCell* p = iFree.next;
+ SCell* q = (SCell*)((TUint8*)aPtr - EAllocCellSize);
+ for (; p && p!=q; p = p->next) {}
+ if (p == q)
+ return p->len - EAllocCellSize;
+ return -1;
+ }
+
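+// Returns the length of the last cell on the free list (header included, unlike
+// FreeCellLen() above), or -1 if the free list is empty.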
+TInt RTestHeap::LastFreeCellLen(void) const
+ {
+ SCell* p = iFree.next;
+ if (p==NULL)
+ return -1;
+ for (; p->next; p=p->next){}
+ return p->len;
+ }
+
+
+/** Checks whether a call to Compress() will actually perform a reduction
+ of the heap.
+ Relies on the last free cell on the heap being the cell that has just been
+ freed, plus any extra.
+ Intended for use by t_heap2.cpp - DoTest4().
+ @param aFreedSize The size in bytes of the cell that was freed
+*/
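+// A worked example (a sketch only, not asserted by the test): assuming the last free
+// cell is at least a page long, with iPageSize = 0x1000, iGrowBy = 0x1000 and
+// KHeapShrinkHysRatio = 512 (i.e. 2.0 in the 8.8 fixed-point scaling implied by the
+// >>8 below), the threshold is 512*(0x1000>>8) = 0x2000; freeing a 0x1800-byte cell
+// then returns _ALIGN_DOWN(0x1800, 0x1000) = 0x1000, while freeing a 0x2800-byte cell
+// returns 0 because the free operation itself would already have shrunk the heap.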
+TInt RTestHeap::CalcComp(TInt aFreedSize)
+ {
+ TInt largestCell=0;
+ largestCell = LastFreeCellLen();
+ // if the largest cell is too small or it would have been compressed by the
+ // free operation then return 0.
+ if (largestCell < iPageSize || aFreedSize >= KHeapShrinkHysRatio*(iGrowBy>>8))
+ {
+ return 0;
+ }
+ else
+ {
+ return _ALIGN_DOWN(aFreedSize,iPageSize);
+ }
}
+/** Compress the heap if KHeapShrinkHysRatio is too large for the preceding
+ free operation to have shrunk it, leaving the heap in the state DoTest4() expects.
+*/
+void RTestHeap::ForceCompress(TInt aFreed)
+ {
+ if (aFreed < KHeapShrinkHysRatio*(iGrowBy>>8))
+ {
+ Compress();
+ }
+ }
+RTestHeap* RTestHeap::FixedHeap(TInt aMaxLength, TInt aAlign, TBool aSingleThread)
+ {
+ RChunk c;
+ TInt bottom = 0x40000;
+ TInt top = bottom + aMaxLength;
+ TInt r = c.CreateDisconnectedLocal(bottom, top, top + bottom, EOwnerThread);
+ if (r!=KErrNone)
+ return NULL;
+ TUint8* base = c.Base() + bottom;
+ RTestHeap* h = (RTestHeap*)UserHeap::FixedHeap(base, aMaxLength, aAlign, aSingleThread);
+ if (!aAlign)
+ aAlign = RHeap::ECellAlignment;
+ test((TUint8*)h == base);
+ test(h->AccessCount() == 1);
+ test(h->HandleCount() == (aSingleThread ? 0 : 1));
+ test(h->Handles() == (aSingleThread ? NULL : (TInt*)&h->LockRef()));
+ test(h->Flags() == TUint32(RAllocator::EFixedSize | (aSingleThread ? RAllocator::ESingleThreaded : 0)));
+ test(h->CellCount() == 0);
+ test(h->TotalAllocSize() == 0);
+ test(h->MaxLength() == aMaxLength);
+ test(h->MinLength() == h->Top() - (TUint8*)h);
+ test(h->Offset() == 0);
+ test(h->GrowBy() == 0);
+ test(h->ChunkHandle() == 0);
+ test(h->Align() == aAlign);
+ TInt min_cell = _ALIGN_UP((KHeapMinCellSize + Max((TInt)RHeap::EAllocCellSize, (TInt)RHeap::EFreeCellSize)), aAlign);
+ TInt hdr_len = _ALIGN_UP(sizeof(RHeap) + RHeap::EAllocCellSize, aAlign) - RHeap::EAllocCellSize;
+ TInt user_len = _ALIGN_DOWN(aMaxLength - hdr_len, aAlign);
+ test(h->Base() == base + hdr_len);
+ test(h->MinCell() == min_cell);
+ test(h->Top() - h->Base() == user_len);
+ test(h->FreeRef().next == (RHeap::SCell*)h->Base());
+ h->TakeChunkOwnership(c);
+ return h;
+ }
+
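+// Records the chunk's handle inside the heap (iChunkHandle, published through
+// iHandles/iHandleCount) so that closing the heap also releases the chunk.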
+void RTestHeap::TakeChunkOwnership(RChunk aChunk)
+ {
+ iChunkHandle = aChunk.Handle();
+ ++iHandleCount;
+ iHandles = &iChunkHandle;
+ }
+
+
+#define ACCESS_COUNT(h) (((RTestHeap*)h)->AccessCount())
+#define HANDLE_COUNT(h) (((RTestHeap*)h)->HandleCount())
+#define HANDLES(h) (((RTestHeap*)h)->Handles())
+#define FLAGS(h) (((RTestHeap*)h)->Flags())
+#define CELL_COUNT(h) (((RTestHeap*)h)->CellCount())
+#define TOTAL_ALLOC_SIZE(h) (((RTestHeap*)h)->TotalAllocSize())
+#define MIN_LENGTH(h) (((RTestHeap*)h)->MinLength())
+#define OFFSET(h) (((RTestHeap*)h)->Offset())
+#define GROW_BY(h) (((RTestHeap*)h)->GrowBy())
+#define CHUNK_HANDLE(h) (((RTestHeap*)h)->ChunkHandle())
+#define LOCK_REF(h) (((RTestHeap*)h)->LockRef())
+#define TOP(h) (((RTestHeap*)h)->Top())
+#define ALIGN(h) (((RTestHeap*)h)->Align())
+#define MIN_CELL(h) (((RTestHeap*)h)->MinCell())
+#define PAGE_SIZE(h) (((RTestHeap*)h)->PageSize())
+#define FREE_REF(h) (((RTestHeap*)h)->FreeRef())
+
+void DoTest1(RHeap* aH)
+ {
+ RTestHeap* h = (RTestHeap*)aH;
+ test.Printf(_L("Test Alloc: min=%x max=%x align=%d growby=%d\n"),
+ h->MinLength(), h->MaxLength(), h->Align(), h->GrowBy());
+ TInt l;
+ TAny* p = NULL;
+ TUint8* next = h->Base();
+ TUint8* top = h->Top();
+ TUint8* limit = (TUint8*)h + h->MaxLength();
+ TBool fixed = h->Flags() & RAllocator::EFixedSize;
+ for (l=1; l<=1024; ++l)
+ {
+ TInt remain1 = top - next;
+ TInt xl1 = _ALIGN_UP(Max((l+RHeap::EAllocCellSize), h->MinCell()), h->Align());
+ p = h->TestAlloc(l);
+ if ( (fixed && remain1 < xl1) || (next + xl1 > limit) )
+ {
+ test(p == NULL);
+ test(top == h->Top());
+ test.Printf(_L("Alloc failed at l=%d next=%08x\n"), l, next);
+ break;
+ }
+ test(p == next + RHeap::EAllocCellSize);
+ if (xl1 > remain1)
+ {
+ // no room for this cell
+ TInt g = h->GrowBy();
+ while (xl1 > remain1)
+ {
+ top += g;
+ remain1 += g;
+ }
+ }
+ test(top == h->Top());
+ if (xl1 + h->MinCell() > remain1)
+ {
+ // this cell fits but remainder is too small or nonexistent
+ xl1 = top - next;
+ next = top;
+ test(h->FreeRef().next == NULL);
+ }
+ else
+ {
+ // this cell fits and remainder can be reused
+ next += xl1;
+ }
+ test(aH->AllocLen(p) == xl1 - RHeap::EAllocCellSize);
+ }
+ h->FullCheck();
+ }
+
+void DoTest2(RHeap* aH)
+ {
+ RTestHeap* h = (RTestHeap*)aH;
+ test.Printf(_L("Test Free: min=%x max=%x align=%d growby=%d\n"),
+ h->MinLength(), h->MaxLength(), h->Align(), h->GrowBy());
+ TInt al;
+ TInt min = h->MinCell();
+ TBool pad = EFalse;
+ for (al=1; al<256; (void)((pad=!pad)!=0 || (al+=al+1)) )
+ {
+ TAny* p[32];
+ TInt last_len = 0;
+ TAny* last = NULL;
+ TInt i;
+ test.Printf(_L("al=%d pad=%d\n"), al, pad);
+ TUint8* top=0;
+ TAny* spare=0;
+ TBool heapReduced = EFalse;
+ for (i=0; i<32; ++i)
+ {
+ // Check whether the cell created for the allocation of al would end up
+ // including extra bytes from the last free cell that aren't enough
+ // to create a new free cell.
+ top = h->Top();
+ TInt freeLen=h->LastFreeCellLen();
+ TInt actualAllocBytes = Max(_ALIGN_UP(al + RHeap::EAllocCellSize, h->Align()), min);
+ TInt remainingBytes = freeLen - actualAllocBytes;
+ if (remainingBytes < min)
+ {
+ // Force the heap to grow so that once this allocation is freed
+ // the free cell left will be large enough to include the al allocation
+ // and to create a new free cell if necessary.
+ actualAllocBytes = _ALIGN_UP(actualAllocBytes + min, h->Align());
+ TAny* q = h->TestAlloc(actualAllocBytes);
+ // Check heap has grown
+ test(top < h->Top());
+ top = h->Top();
+ test(q!=NULL);
+				// The heap has grown, so allocate a placeholder cell to stop the heap
+				// from being shrunk and the cell we actually want to allocate from
+				// ending up the wrong size
+ spare=h->TestAlloc(8);
+ h->TestFree(q);
+ // Ensure heap wasn't shrunk after free
+ test(top == h->Top());
+ }
+ top = h->Top();
+			// Allocate the new cell
+ p[i] = h->TestAlloc(al);
+ test(p[i]!=NULL);
+ if (remainingBytes < min)
+ {// now safe to free any padding as p[i] now allocated and its size can't change
+ h->TestFree(spare);
+ }
+ TInt tmp1=h->AllocLen(p[i]);
+ TInt tmp2=Max(_ALIGN_UP(al+RHeap::EAllocCellSize,h->Align()), min)-RHeap::EAllocCellSize;
+ test(tmp1 == tmp2);
+ }
+ last = (TUint8*)p[31] + _ALIGN_UP(Max((al + RHeap::EAllocCellSize), min), h->Align());
+ last_len = h->FreeCellLen(last);
+ test(last_len > 0);
+ if (pad)
+ {
+ test(h->TestAlloc(last_len) == last);
+ test(h->FreeRef().next == NULL);
+ }
+ else
+ last = NULL;
+ top = h->Top();
+ for (i=0,heapReduced=EFalse; i<32; ++i)
+ {
+ h->TestFree(p[i]);
+ TInt fl = h->FreeCellLen(p[i]);
+ TInt xfl = _ALIGN_UP(Max((al + RHeap::EAllocCellSize), h->MinCell()), h->Align()) - RHeap::EAllocCellSize;
+ if (h->Top() < top) // heap was reduced due to small KHeapShrinkHysRatio and big KHeapMinCellSize
+ {
+ top = h->Top();
+ heapReduced = ETrue;
+ }
+
+ if (i < 31 || pad)
+ test(fl == xfl);
+ else
+ {
+ if (!heapReduced)
+ test(fl == xfl + RHeap::EAllocCellSize + last_len);
+ else
+ {
+ heapReduced = EFalse;
+ }
+ }
+ test(h->TestAlloc(al)==p[i]);
+ }
+ for (i=0,heapReduced=EFalse; i<31; ++i)
+ {
+ TInt j = i+1;
+ TUint8* q;
+			// Free two adjacent cells and check that the free cell left is the combined
+			// size of the 2 adjacent cells just freed
+ h->TestFree(p[i]);
+ h->TestFree(p[j]);
+ TInt fl = h->FreeCellLen(p[i]);
+ if (h->Top() < top) // heap was reduced due to small KHeapShrinkHysRatio and big KHeapMinCellSize
+ {
+ top = h->Top();
+ heapReduced = ETrue;
+ }
+ TInt xfl = 2 * _ALIGN_UP(Max((al + RHeap::EAllocCellSize), h->MinCell()), h->Align()) - RHeap::EAllocCellSize;
+ if (j < 31 || pad)
+ test(fl == xfl);
+ else
+ {
+ if (!heapReduced)
+ test(fl == xfl + RHeap::EAllocCellSize + last_len);
+ else
+ {
+ heapReduced = EFalse;
+ }
+ }
+ test(h->FreeCellLen(p[j]) < 0);
+ test(h->TestAlloc(fl)==p[i]);
+ test(h->Top() == top);
+ h->TestFree(p[i]);
+ test(h->FreeCellLen(p[i]) == fl);
+			// test that when you alloc a cell larger than the cells just freed,
+			// its position is not the same as the freed cells';
+			// this holds for all cells except the top/last one
+ if (j < 31 && !pad && fl < last_len)
+ {
+ q = (TUint8*)h->TestAlloc(fl+1);
+ if (h->Top() > top)
+ top = h->Top();
+ test(h->Top() == top);
+ test(q > p[i]);
+ h->TestFree(q);
+ if (h->Top() < top) // heap was reduced due to small KHeapShrinkHysRatio and big KHeapMinCellSize
+ {
+ top = h->Top();
+ heapReduced = ETrue;
+ }
+ }
+			// check that a cell just smaller than the free space, but leaving too little
+			// for a new free cell to be created, takes the size of the whole free cell
+ test(h->TestAlloc(fl-min+1)==p[i]);
+ test(h->Top() == top);
+ test(h->AllocLen(p[i])==fl);
+ h->TestFree(p[i]);
+			// Check that a cell small enough for both a new free cell and the alloc'd
+			// cell to be created is allocated at p[i]
+ test(h->TestAlloc(fl-min)==p[i]);
+ test(h->Top() == top);
+ // check free cell is at expected position
+ q = (TUint8*)p[i] + fl - min + RHeap::EAllocCellSize;
+ test(h->FreeCellLen(q) == min - RHeap::EAllocCellSize);
+			// alloc a 0-length cell at q; this works as a new cell of min length will be created
+ test(h->TestAlloc(0) == q);
+ test(h->Top() == top);
+ h->TestFree(p[i]);
+ test(h->FreeCellLen(p[i]) == fl - min);
+ h->TestFree(q);
+ // again check free cells are combined
+ test(h->FreeCellLen(q) < 0);
+ test(h->FreeCellLen(p[i]) == fl);
+			// check that reallocating the cells places them back at the same positions
+ test(h->TestAlloc(al)==p[i]);
+ test(h->Top() == top);
+ test(h->TestAlloc(al)==p[j]);
+ test(h->Top() == top);
+ if (pad)
+ test(h->FreeRef().next == NULL);
+ }
+ for (i=0,heapReduced=EFalse; i<30; ++i)
+ {
+ TInt j = i+1;
+ TInt k = i+2;
+ TUint8* q;
+			// Free 3 adjacent cells and check the free cell created is their combined size
+ h->TestFree(p[i]);
+ h->TestFree(p[k]);
+ h->TestFree(p[j]);
+ h->FullCheck();
+ if (h->Top() < top) // heap was reduced due to small KHeapShrinkHysRatio and big KHeapMinCellSize
+ {
+ top = h->Top();
+ heapReduced = ETrue;
+ }
+ TInt fl = h->FreeCellLen(p[i]);
+ TInt xfl = 3 * _ALIGN_UP(Max((al + RHeap::EAllocCellSize), h->MinCell()), h->Align()) - RHeap::EAllocCellSize;
+ if (k < 31 || pad)
+ test(fl == xfl);
+ else
+ {
+ if (!heapReduced)
+ test(fl == xfl + RHeap::EAllocCellSize + last_len);
+ else
+ {
+ heapReduced = EFalse;
+ }
+ }
+ test(h->FreeCellLen(p[j]) < 0);
+ test(h->FreeCellLen(p[k]) < 0);
+			// ensure an allocation of the free cell's size reuses the newly created free cell
+ test(h->TestAlloc(fl)==p[i]);
+ test(h->Top() == top);
+ h->TestFree(p[i]);
+ test(h->FreeCellLen(p[i]) == fl);
+ if (h->Top() < top) // heap was reduced due to small KHeapShrinkHysRatio and big KHeapMinCellSize
+ top = h->Top();
+ if (k < 31 && !pad && fl < last_len)
+ {
+				// Test that a cell one byte larger than the free cell size is allocated somewhere else
+ q = (TUint8*)h->TestAlloc(fl+1);
+ if (h->Top() > top)
+ top = h->Top();
+ test(h->Top() == top);
+ test(q > p[i]);
+ h->TestFree(q);
+ if (h->Top() < top) // heap was reduced due to small KHeapShrinkHysRatio and big KHeapMinCellSize
+ {
+ top = h->Top();
+ heapReduced = ETrue;
+ }
+ }
+			// check that allocating a cell just smaller than the free cell size, but
+			// too large for a new free cell to be created, takes the size of the whole free cell
+ test(h->TestAlloc(fl-min+1)==p[i]);
+ test(h->Top() == top);
+ test(h->AllocLen(p[i])==fl);
+ h->TestFree(p[i]);
+ // ensure free cell is created this time as well as alloc'd cell
+ test(h->TestAlloc(fl-min)==p[i]);
+ test(h->Top() == top);
+ q = (TUint8*)p[i] + fl - min + RHeap::EAllocCellSize;
+ test(h->FreeCellLen(q) == min - RHeap::EAllocCellSize);
+ test(h->TestAlloc(0) == q);
+ test(h->Top() == top);
+ h->TestFree(p[i]);
+ test(h->FreeCellLen(p[i]) == fl - min);
+ h->TestFree(q);
+ test(h->FreeCellLen(q) < 0);
+ test(h->FreeCellLen(p[i]) == fl);
+ // realloc all cells and check heap not expanded
+ test(h->TestAlloc(al)==p[i]);
+ test(h->Top() == top);
+ test(h->TestAlloc(al)==p[j]);
+ test(h->Top() == top);
+ test(h->TestAlloc(al)==p[k]);
+ test(h->Top() == top);
+			// If padding then no space should be left on the heap
+ if (pad)
+ test(h->FreeRef().next == NULL);
+ }
+		// when padding, this will free the padding from the top of the heap
+ h->TestFree(last);
+ }
+ h->FullCheck();
+ }
+
+void DoTest3(RHeap* aH)
+ {
+ RTestHeap* h = (RTestHeap*)aH;
+ test.Printf(_L("Test ReAlloc: min=%x max=%x align=%d growby=%d\n"),
+ h->MinLength(), h->MaxLength(), h->Align(), h->GrowBy());
+	// allocate contiguous heap cells, then free them and reallocate again
+ TInt al;
+ for (al=1; al<256; al+=al+1)
+ {
+ TAny* p0 = h->TestAlloc(al);
+ TInt al0 = h->AllocLen(p0);
+ h->TestFree(p0);
+ TAny* p1 = h->TestReAlloc(NULL, al, 0);
+ TInt al1 = h->AllocLen(p1);
+ test(p1 == p0);
+ test(al1 == al0);
+ h->TestFree(p1);
+ TAny* p2 = h->TestAlloc(1);
+ TAny* p3 = h->TestReAlloc(p2, al, 0);
+ test(p3 == p0);
+ TInt al3 = h->AllocLen(p3);
+ test(al3 == al0);
+ h->TestFree(p3);
+ TAny* p4 = h->TestAlloc(1024);
+ TAny* p5 = h->TestReAlloc(p4, al, 0);
+ test(p5 == p0);
+ TInt al5 = h->AllocLen(p5);
+ test(al5 == al0);
+ h->TestFree(p5);
+ }
+ TInt i;
+ TInt j;
+ for (j=0; j<30; j+=3)
+ {
+ TAny* p[30];
+ TInt ala[30];
+ TInt fla[30];
+ h->Reset();
+ for (i=0; i<30; ++i)
+ {
+ p[i] = h->TestAlloc(8*i*i);
+ ala[i] = h->AllocLen(p[i]);
+ fla[i] = 0;
+ }
+ for (i=1; i<30; i+=3)
+ {
+ h->TestFree(p[i]);
+ fla[i] = h->FreeCellLen(p[i]);
+ test(fla[i] == ala[i]);
+ test(h->FreeCellLen(p[i-1]) < 0);
+ test(h->FreeCellLen(p[i+1]) < 0);
+ }
+ h->FullCheck();
+ TInt al1 = _ALIGN_UP(Max((RHeap::EAllocCellSize + 1), h->MinCell()), h->Align());
+		// adjust al1 for the case where the reallocated heap cell will not be shrunk because the remainder
+		// will not be big enough to form a new free cell, due to a big KHeapMinCellSize value
+ TInt alaj = ala[j] + RHeap::EAllocCellSize;
+ if (al1 < alaj && alaj - al1 < h->MinCell())
+ al1 = alaj;
+ TAny* p1 = h->TestReAlloc(p[j], 1, RHeap::ENeverMove);
+ test(p1 == p[j]);
+ test(h->AllocLen(p1) == al1 - RHeap::EAllocCellSize);
+ TAny* p1b = (TUint8*)p1 + al1;
+ test(h->FreeCellLen(p1b) == fla[j+1] + RHeap::EAllocCellSize + ala[j] - al1);
+ TInt l2 = ala[j] + fla[j+1] + RHeap::EAllocCellSize; // max without moving
+ TInt l3 = l2 - h->MinCell();
+ TAny* p3 = h->TestReAlloc(p[j], l3, RHeap::ENeverMove);
+ test(p3 == p[j]);
+ TAny* p3b = (TUint8*)p3 + h->AllocLen(p3) + RHeap::EAllocCellSize;
+ test(h->FreeCellLen(p3b) == h->MinCell() - RHeap::EAllocCellSize);
+ TAny* p2 = h->TestReAlloc(p[j], l2, RHeap::ENeverMove);
+ test(p2 == p[j]);
+ test(h->AllocLen(p2) == l2);
+ TAny* p4 = h->TestReAlloc(p[j], l2+1, RHeap::ENeverMove);
+ test(p4 == NULL);
+ test(h->AllocLen(p2) == l2);
+ TAny* p5 = h->TestReAlloc(p[j], l2+1, 0);
+ TInt k = 0;
+ for (; k<30 && fla[k] <= l2; ++k) {}
+ if (k < 30)
+ test(p5 == p[k]);
+ else
+ test(p5 >= (TUint8*)p[29] + ala[29]);
+ test(h->FreeCellLen(p2) == ala[j] + ala[j+1] + RHeap::EAllocCellSize);
+ TInt ali = _ALIGN_UP(RHeap::EAllocCellSize,h->Align());
+ TAny* p6b = (TUint8*)p[j+2] + ala[j+2] - ali + RHeap::EAllocCellSize;
+ test(h->FreeCellLen(p6b) < 0);
+ TAny* p6 = h->TestReAlloc(p[j+2], ala[j+2] - ali , 0);
+ test(p6 == p[j+2]);
+ if (h->AllocLen(p6) != ala[j+2]) // allocated heap cell size changed
+ test(h->FreeCellLen(p6b) == h->MinCell() - RHeap::EAllocCellSize);
+ TInt g = h->GrowBy();
+ TAny* p7 = h->TestReAlloc(p5, 8*g, 0);
+ test(p7 >= p5);
+ TUint8* p8 = (TUint8*)p7 - RHeap::EAllocCellSize + al1;
+ TUint8* p9 = (TUint8*)_ALIGN_UP(TLinAddr(p8), h->PageSize());
+ if (p9-p8 < h->MinCell())
+ p9 += h->PageSize();
+ TAny* p7b = h->TestReAlloc(p7, 1, 0);
+ test(p7b == p7);
+ test(h->Top() + (RHeap::EAllocCellSize & (h->Align()-1)) == p9);
+
+ h->FullCheck();
+ }
+ }
+
+// Test compression
+// {1 free cell, >1 free cell} x {reduce cell, eliminate cell, reduce cell but too small}
+//
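+// That is, each pass of the loop below leaves either one free cell or more than one on
+// the heap, and for each of those states the test exercises a compression that shrinks
+// the top free cell, one that eliminates it entirely, and one where the recoverable
+// space is too small for any shrinking to take place.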
+void DoTest4(RHeap* aH)
+ {
+ RTestHeap* h = (RTestHeap*)aH;
+ test.Printf(_L("Test Compress: min=%x max=%x align=%d growby=%d\n"),
+ h->MinLength(), h->MaxLength(), h->Align(), h->GrowBy());
+ TInt page_size;
+ UserHal::PageSizeInBytes(page_size);
+ test(page_size == h->PageSize());
+ TInt g = h->GrowBy();
+ TEST_ALIGN(g, page_size);
+ test(g >= page_size);
+ RChunk c;
+ c.SetHandle(h->ChunkHandle());
+ TInt align = h->Align();
+ TInt minc = h->MinCell();
+
+ TInt orig_size = c.Size();
+ TUint8* orig_top = h->Top();
+
+	// the size in bytes that the last free cell at the top of the heap must reach
+	// before the heap will be shrunk; the size must include the number of bytes
+	// used to store the cell header, i.e. RHeap::EAllocCellSize
+ TInt shrinkThres = KHeapShrinkHysRatio*(g>>8);
+
+ TInt pass;
+ for (pass=0; pass<2; ++pass)
+ {
+ TUint8* p0 = (TUint8*)h->TestAlloc(4);
+ test(p0 == h->Base() + RHeap::EAllocCellSize);
+ TInt l1 = h->Top() - (TUint8*)h->FreeRef().next;
+ TEST_ALIGN(l1, align);
+ l1 -= RHeap::EAllocCellSize;
+ TUint8* p1;
+ // Grow heap by 2*iGrowBy bytes
+ p1 = (TUint8*)h->TestAlloc(l1 + 2*g);
+ test(p1 == p0 + h->AllocLen(p0) + RHeap::EAllocCellSize);
+ test(h->Top() - orig_top == 2*g);
+ test(c.Size() - orig_size == 2*g);
+ // May compress heap, may not
+ h->TestFree(p1);
+ h->ForceCompress(2*g);
+ test(h->Top() == orig_top);
+ test(c.Size() == orig_size);
+ test((TUint8*)h->FreeRef().next == p1 - RHeap::EAllocCellSize);
+ h->FullCheck();
+ //if KHeapShrinkHysRatio is > 2.0 then heap compression will occur here
+ test(h->Compress() == 0);
+ test(h->TestAlloc(l1) == p1);
+ test(h->FreeRef().next == NULL);
+ if (pass)
+ h->TestFree(p0); // leave another free cell on second pass
+ TInt l2 = g - RHeap::EAllocCellSize;
+ // Will grow heap by iGrowBy bytes
+ TUint8* p2 = (TUint8*)h->TestAlloc(l2);
+ test(p2 == orig_top + RHeap::EAllocCellSize);
+ test(h->Top() - orig_top == g);
+ test(c.Size() - orig_size == g);
+ // may or may not compress heap
+ h->TestFree(p2);
+ if (l2+RHeap::EAllocCellSize >= shrinkThres)
+ {
+			// When KHeapShrinkHysRatio is small enough the heap will have been compressed
+ test(h->Top() == orig_top);
+ if (pass)
+ {
+ test((TUint8*)h->FreeRef().next == p0 - RHeap::EAllocCellSize);
+ test((TUint8*)h->FreeRef().next->next == NULL);
+ }
+ else
+ test((TUint8*)h->FreeRef().next == NULL);
+ }
+ else
+ {
+ test(h->Top() - orig_top == g);
+ if (pass)
+ {
+ test((TUint8*)h->FreeRef().next == p0 - RHeap::EAllocCellSize);
+ test((TUint8*)h->FreeRef().next->next == orig_top);
+ }
+ else
+ test((TUint8*)h->FreeRef().next == orig_top);
+ }
+		// this compress will only do anything if KHeapShrinkHysRatio is large
+		// enough to introduce hysteresis; otherwise the heap would have been
+		// compressed by the free operation itself
+ TInt tmp1,tmp2;
+ tmp2=h->CalcComp(g);
+ tmp1=h->Compress();
+ test(tmp1 == tmp2);
+ test(h->Top() == orig_top);
+ test(c.Size() == orig_size);
+ h->FullCheck();
+ // shouldn't compress heap as already compressed
+ test(h->Compress() == 0);
+ //grow heap by iGrowBy bytes
+ test(h->TestAlloc(l2) == p2);
+ //grow heap by iGrowBy bytes
+ TUint8* p3 = (TUint8*)h->TestAlloc(l2);
+ test(p3 == p2 + g);
+ test(h->Top() - orig_top == 2*g);
+ test(c.Size() - orig_size == 2*g);
+ // may or may not reduce heap
+ h->TestFree(p2);
+ // may or may not reduce heap
+ h->TestFree(p3);
+ h->ForceCompress(2*g);
+ test(h->Top() == orig_top);
+ test(c.Size() == orig_size);
+ h->FullCheck();
+ if (pass)
+ {
+ test((TUint8*)h->FreeRef().next == p0 - RHeap::EAllocCellSize);
+ test((TUint8*)h->FreeRef().next->next == NULL);
+ }
+ else
+ test((TUint8*)h->FreeRef().next == NULL);
+ //grow heap by iGrowBy bytes
+ test(h->TestAlloc(l2) == p2);
+ //grow heap by iGrowBy*2 + page size bytes
+ test(h->TestAlloc(l2 + g + page_size) == p3);
+ test(h->Top() - orig_top == 4*g);
+ test(c.Size() - orig_size == 4*g);
+ // will compress heap if KHeapShrinkHysRatio <= KHeapShrinkRatioDflt
+ test(h->TestReAlloc(p3, page_size - RHeap::EAllocCellSize, 0) == p3);
+ h->ForceCompress(g+page_size);
+ test(h->Top() - orig_top == g + page_size);
+ test(c.Size() - orig_size == g + page_size);
+ h->FullCheck();
+ // will compress heap if KHeapShrinkHysRatio <= KHeapShrinkRatio1
+ h->TestFree(p2);
+ // will compress heap if KHeapShrinkHysRatio <= KHeapShrinkRatio1 && g<=page_size
+ // or KHeapShrinkHysRatio >= 2.0 and g==page_size
+ h->TestFree(p3);
+ // may or may not perform further compression
+ tmp1=h->CalcComp(g+page_size);
+ tmp2=h->Compress();
+ test(tmp1 == tmp2);
+ test(h->Top() == orig_top);
+ test(c.Size() == orig_size);
+ h->FullCheck();
+ test(h->TestAlloc(l2 - minc) == p2);
+ test(h->TestAlloc(l2 + g + page_size + minc) == p3 - minc);
+ test(h->Top() - orig_top == 4*g);
+ test(c.Size() - orig_size == 4*g);
+ h->TestFree(p3 - minc);
+ h->ForceCompress(l2 + g + page_size + minc);
+ test(h->Top() - orig_top == g);
+ test(c.Size() - orig_size == g);
+ h->FullCheck();
+ if (pass)
+ {
+ test((TUint8*)h->FreeRef().next == p0 - RHeap::EAllocCellSize);
+ test((TUint8*)h->FreeRef().next->next == p3 - minc - RHeap::EAllocCellSize);
+ }
+ else
+ test((TUint8*)h->FreeRef().next == p3 - minc - RHeap::EAllocCellSize);
+ h->TestFree(p2);
+ if (l2+RHeap::EAllocCellSize >= shrinkThres)
+ {
+			// When KHeapShrinkHysRatio is small enough the heap will have been compressed
+ test(h->Top() == orig_top);
+ test(c.Size() - orig_size == 0);
+ }
+ else
+ {
+ test(h->Top() - orig_top == g);
+ test(c.Size() - orig_size == g);
+ }
+ h->FullCheck();
+ if ( ((TLinAddr)orig_top & (align-1)) == 0)
+ {
+ TAny* free;
+ TEST_ALIGN(p2 - RHeap::EAllocCellSize, page_size);
+ // will have free space of g-minc
+ test(h->TestAlloc(l2 + minc) == p2);
+ test(h->Top() - orig_top == 2*g);
+ test(c.Size() - orig_size == 2*g);
+ free = pass ? h->FreeRef().next->next : h->FreeRef().next;
+ test(free != NULL);
+ test(h->TestReAlloc(p2, l2 - 4, 0) == p2);
+ TInt freeSp = g-minc + (l2+minc - (l2-4));
+ TInt adjust = 0;
+ if (freeSp >= shrinkThres && freeSp-page_size >= minc)
+ {
+ // if page_size is less than growBy (g) then heap will be shrunk
+ // by less than a whole g.
+ adjust = g-((page_size<g)?page_size:0);
+ }
+ test(h->Top() - orig_top == 2*g - adjust);
+ test(c.Size() - orig_size == 2*g - adjust);
+ free = pass ? h->FreeRef().next->next : h->FreeRef().next;
+ test(free != NULL);
+ TEST_ALIGN(TLinAddr(free)+4, page_size);
+ test(h->TestAlloc(l2 + g + page_size + 4) == p3 - 4);
+ test(h->Top() - orig_top == 4*g - adjust);
+ test(c.Size() - orig_size == 4*g - adjust);
+ h->TestFree(p3 - 4);
+ h->ForceCompress(l2 + g + page_size + 4);
+ test(h->Top() - orig_top == g + page_size);
+ test(c.Size() - orig_size == g + page_size);
+ h->FullCheck();
+ h->TestFree(p2);
+ h->ForceCompress(l2-4);
+ test(h->Compress() == 0);
+ // check heap is grown, will have free space of g-minc
+ test(h->TestAlloc(l2 + minc) == p2);
+ test(h->Top() - orig_top == 2*g);
+ test(c.Size() - orig_size == 2*g);
+ free = pass ? h->FreeRef().next->next : h->FreeRef().next;
+ test(free != NULL);
+ // may shrink heap as will now have g+minc free bytes
+ test(h->TestReAlloc(p2, l2 - minc, 0) == p2);
+ if (g+minc >= shrinkThres)
+ {
+ test(h->Top() - orig_top == g);
+ test(c.Size() - orig_size == g);
+ }
+ else
+ {
+ test(h->Top() - orig_top == 2*g);
+ test(c.Size() - orig_size == 2*g);
+ }
+ free = pass ? h->FreeRef().next->next : h->FreeRef().next;
+ test(free != NULL);
+ TEST_ALIGN(TLinAddr(free)+minc, page_size);
+ test(h->TestAlloc(l2 + g + page_size + minc) == p3 - minc);
+ test(h->Top() - orig_top == 4*g);
+ test(c.Size() - orig_size == 4*g);
+ h->TestFree(p3 - minc);
+ h->ForceCompress(l2 + g + page_size + minc);
+ test(h->Top() - orig_top == g);
+ test(c.Size() - orig_size == g);
+ h->FullCheck();
+ h->TestFree(p2);
+ }
+
+ h->TestFree(p1);
+ if (pass == 0)
+ h->TestFree(p0);
+ h->Compress();
+ }
+ h->FullCheck();
+ }
+
+void Test1()
+ {
+ RHeap* h;
+ h = RTestHeap::FixedHeap(0x1000, 0);
+ test(h != NULL);
+ DoTest1(h);
+ h->Close();
+ h = RTestHeap::FixedHeap(0x1000, 0, EFalse);
+ test(h != NULL);
+ DoTest1(h);
+ h->Close();
+ h = RTestHeap::FixedHeap(0x10000, 64);
+ test(h != NULL);
+ DoTest1(h);
+ h->Close();
+ h = RTestHeap::FixedHeap(0x100000, 4096);
+ test(h != NULL);
+ DoTest1(h);
+ h->Close();
+ h = RTestHeap::FixedHeap(0x100000, 8192);
+ test(h != NULL);
+ DoTest1(h);
+ h->Close();
+ h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x1000, 0x1000, 4);
+ test(h != NULL);
+ DoTest1(h);
+ h->Close();
+ h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x10000, 0x1000, 4);
+ test(h != NULL);
+ DoTest1(h);
+ h->Close();
+ h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x100000, 0x1000, 4096);
+ test(h != NULL);
+ DoTest1(h);
+ h->Close();
+ h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x100000, 0x1000, 4);
+ test(h != NULL);
+ DoTest1(h);
+ h->Reset();
+ DoTest2(h);
+ h->Reset();
+ DoTest3(h);
+ h->Reset();
+ DoTest4(h);
+ h->Close();
+ h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x100000, 0x1000, 8);
+ test(h != NULL);
+ DoTest1(h);
+ h->Reset();
+ DoTest2(h);
+ h->Reset();
+ DoTest3(h);
+ h->Reset();
+ DoTest4(h);
+ h->Close();
+ h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x100000, 0x1000, 16);
+ test(h != NULL);
+ DoTest1(h);
+ h->Reset();
+ DoTest2(h);
+ h->Reset();
+ DoTest3(h);
+ h->Reset();
+ DoTest4(h);
+ h->Close();
+ h = UserHeap::ChunkHeap(&KNullDesC(), 0x1000, 0x100000, 0x1000, 32);
+ test(h != NULL);
+ DoTest1(h);
+ h->Reset();
+ DoTest2(h);
+ h->Reset();
+ DoTest3(h);
+ h->Reset();
+ DoTest4(h);
+ h->Close();
+ h = UserHeap::ChunkHeap(&KNullDesC(), 0x3000, 0x100000, 0x3000, 4);
+ test(h != NULL);
+ DoTest1(h);
+ h->Reset();
+ DoTest2(h);
+ h->Reset();
+ DoTest3(h);
+ h->Reset();
+ DoTest4(h);
+ h->Close();
+ }
+
struct SHeapStress
{
RThread iThread;
@@ -384,7 +1277,8 @@
void DoStressTest1(RAllocator* aAllocator)
{
RTestHeap* h = (RTestHeap*)aAllocator;
- test.Printf(_L("Test Stress 1: max=%x\n"), h->MaxLength());
+ test.Printf(_L("Test Stress 1: min=%x max=%x align=%d growby=%d\n"),
+ h->MinLength(), h->MaxLength(), h->Align(), h->GrowBy());
SHeapStress hs;
hs.iSeed = 0xb504f334;
hs.iAllocator = aAllocator;
@@ -398,7 +1292,8 @@
void DoStressTest2(RAllocator* aAllocator)
{
RTestHeap* h = (RTestHeap*)aAllocator;
- test.Printf(_L("Test Stress 2: max=%x\n"), h->MaxLength());
+ test.Printf(_L("Test Stress 2: min=%x max=%x align=%d growby=%d\n"),
+ h->MinLength(), h->MaxLength(), h->Align(), h->GrowBy());
SHeapStress hs1;
SHeapStress hs2;
hs1.iSeed = 0xb504f334;
@@ -435,31 +1330,39 @@
TInt TestHeapGrowInPlace(TInt aMode)
{
TBool reAllocs=EFalse;
+ TBool heapGrew=EFalse;
+
RHeap* myHeap;
- //
- // Fixed DL heap used.
- //
- myHeap = UserHeap::ChunkHeap(NULL,0x4000,0x4000,0x1000);
+
+ myHeap = UserHeap::ChunkHeap(NULL,0x1000,0x4000,0x1000);
TAny *testBuffer,*testBuffer2;
// Start size chosen so that 1st realloc will use up exactly all the heap.
// Later iterations wont, and there will be a free cell at the end of the heap.
- TInt currentSize = ((0x800) - KSizeOfHeap) - KAllocCellSize;
+ TInt currentSize = ((0x800) - sizeof(RHeap)) - RHeap::EAllocCellSize;
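+	// A sketch of the arithmetic (ignoring alignment rounding): the RHeap header, the
+	// cell header and this first allocation together fill 0x800 bytes of the 0x1000-byte
+	// initial heap, so the first realloc (growBy = 0x800, below) fills the heap exactly;
+	// growBy then shrinks by 16 per iteration, so later reallocs leave a free cell at
+	// the top and the heap must keep growing to satisfy them.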
TInt growBy = 0x800;
-
+ TInt newSpace, space;
+
testBuffer2 = myHeap->Alloc(currentSize);
+ newSpace = myHeap->Size();
do
{
- testBuffer = testBuffer2;
+ space = newSpace;
+ testBuffer = testBuffer2;
currentSize+=growBy;
testBuffer2 = myHeap->ReAlloc(testBuffer,currentSize,aMode);
+ newSpace = myHeap->Size();
+
if (testBuffer2)
{
if (testBuffer!=testBuffer2)
reAllocs = ETrue;
+
+ if (newSpace>space)
+ heapGrew = ETrue;
}
growBy-=16;
} while (testBuffer2);
@@ -474,6 +1377,11 @@
test.Printf(_L("Failure - Memory was moved!\n"));
return -100;
}
+ if (!heapGrew)
+ {
+ test.Printf(_L("Failure - Heap Never Grew!\n"));
+ return -200;
+ }
if (currentSize<= 0x3000)
{
test.Printf(_L("Failed to grow by a reasonable amount!\n"));
@@ -532,51 +1440,15 @@
TestDEF078391Heap->Close();
}
-void PageBitmapGrowTest()
- {
- // Create a large heap to allocate 4 Mb memory (64 * 68 kb).
- test.Next(_L("Allocate 64 * 68 kbytes to cause page bitmap growing"));
- RHeap* myHeap;
- myHeap = UserHeap::ChunkHeap(NULL,0x1000,0x500000,0x1000);
- test(myHeap!=NULL);
- TInt OrigSize = myHeap->Size();
- TUint8* cell[64];
- // allocate all cells
- TInt i;
- RTestHeap* h = (RTestHeap*)myHeap;
- for (i=0; i<64; ++i)
- {
- cell[i] = (TUint8*)h->TestAlloc(0x11000);
- test(cell[i]!=NULL);
- }
- h->FullCheck();
-
- // Release all allocated buffers by reseting heap
- TInt Size = myHeap->Size();
- test(Size > 0x400000);
- myHeap->Reset();
- TInt Count = myHeap->AllocSize(Size);
- test(Count==0);
- test(Size==0);
- Size = myHeap->Size();
- test(Size==OrigSize);
-
- h->Close();
-
- }
-
TInt E32Main()
{
test.Title();
__KHEAP_MARK;
test.Start(_L("Testing heaps"));
TestDEF078391();
+ Test1();
StressTests();
ReAllocTests();
- //
- // Some special tests for slab- and paged allocator
- //
- PageBitmapGrowTest();
test.End();
__KHEAP_MARKEND;
return 0;