116 test(r==KErrNone); |
129 test(r==KErrNone); |
117 test(FreeRam()==free); |
130 test(FreeRam()==free); |
118 |
131 |
119 } |
132 } |
120 |
133 |
|
134 |
|
// Parameter block passed (via TAny*) to FillPhysicalRam(), which runs either
// inline or as a thread entry point.
struct SPhysAllocData
	{
	TUint iSize;			// size in bytes of each physically contiguous allocation
	TUint iAlign;			// log2 of the required alignment of each allocation
	TBool iCheckMaxAllocs;	// if set, verify no more than the expected number of allocations succeed
	TBool iCheckFreeRam;	// if set, verify free-RAM accounting after each alloc and after all frees
	};
|
142 |
|
143 |
|
144 TInt FillPhysicalRam(TAny* aArgs) |
|
145 { |
|
146 SPhysAllocData& allocData = *((SPhysAllocData*)aArgs); |
|
147 TUint maxAllocs = FreeRam() / allocData.iSize; |
|
148 TUint32* physAddrs = new TUint32[maxAllocs + 1]; |
|
149 if (!physAddrs) |
|
150 return KErrNoMemory; |
|
151 TUint32* pa = physAddrs; |
|
152 TUint32 alignMask = (1 << allocData.iAlign) - 1; |
|
153 TUint initialFreeRam = FreeRam(); |
|
154 TInt r = KErrNone; |
|
155 TUint allocations = 0; |
|
156 for(; allocations <= maxAllocs; ++allocations) |
|
157 { |
|
158 TUint freeRam = FreeRam(); |
|
159 r = AllocPhysicalRam(*pa, allocData.iSize, allocData.iAlign); |
|
160 if (r != KErrNone) |
|
161 break; |
|
162 if (*pa++ & alignMask) |
|
163 { |
|
164 r = KErrGeneral; |
|
165 RDebug::Printf("Error alignment phys addr 0x%08x", *(pa - 1)); |
|
166 break; |
|
167 } |
|
168 if (allocData.iCheckFreeRam && freeRam - allocData.iSize != (TUint)FreeRam()) |
|
169 { |
|
170 r = KErrGeneral; |
|
171 RDebug::Printf("Error in free ram 0x%08x orig 0x%08x", FreeRam(), freeRam); |
|
172 break; |
|
173 } |
|
174 } |
|
175 |
|
176 TUint32* physEnd = pa; |
|
177 TBool failFrees = EFalse; |
|
178 for (pa = physAddrs; pa < physEnd; pa++) |
|
179 { |
|
180 if (FreePhysicalRam(*pa, allocData.iSize) != KErrNone) |
|
181 failFrees = ETrue; |
|
182 } |
|
183 if (failFrees) |
|
184 r = KErrNotFound; |
|
185 if (allocData.iCheckMaxAllocs && allocations > maxAllocs) |
|
186 { |
|
187 r = KErrOverflow; |
|
188 RDebug::Printf("Error able to allocate too many pages"); |
|
189 } |
|
190 if (allocData.iCheckFreeRam && initialFreeRam != (TUint)FreeRam()) |
|
191 { |
|
192 r = KErrGeneral; |
|
193 RDebug::Printf("Error in free ram 0x%08x initial 0x%08x", FreeRam(), initialFreeRam); |
|
194 } |
|
195 delete[] physAddrs; |
|
196 if (r != KErrNone && r != KErrNoMemory) |
|
197 return r; |
|
198 TUint possibleAllocs = initialFreeRam / allocData.iSize; |
|
199 if (allocData.iCheckMaxAllocs && possibleAllocs != allocations) |
|
200 { |
|
201 RDebug::Printf("Error in number of allocations possibleAllocs %d allocations %d", possibleAllocs, allocations); |
|
202 return KErrGeneral; |
|
203 } |
|
204 return allocations; |
|
205 } |
|
206 |
|
207 |
|
208 void TestMultipleContiguousAllocations(TUint aNumThreads, TUint aSize, TUint aAlign) |
|
209 { |
|
210 test.Printf(_L("TestMultiContig threads %d size 0x%x, align %d\n"), aNumThreads, aSize, aAlign); |
|
211 SPhysAllocData allocData; |
|
212 allocData.iSize = aSize; |
|
213 allocData.iAlign = aAlign; |
|
214 allocData.iCheckMaxAllocs = EFalse; |
|
215 allocData.iCheckFreeRam = EFalse; |
|
216 // Start several threads all contiguous allocating memory. |
|
217 RThread* threads = new RThread[aNumThreads]; |
|
218 TRequestStatus* status = new TRequestStatus[aNumThreads]; |
|
219 TUint i = 0; |
|
220 for (; i < aNumThreads; i++) |
|
221 {// Need enough heap to store addr of every possible allocation + 1. |
|
222 TUint requiredHeapMax = Max(PageSize, ((InitFreeRam / aSize) / sizeof(TUint32)) + sizeof(TUint32)); |
|
223 TInt r = threads[i].Create(KNullDesC, FillPhysicalRam, KDefaultStackSize, PageSize, requiredHeapMax, (TAny*)&allocData); |
|
224 test_KErrNone(r); |
|
225 threads[i].Logon(status[i]); |
|
226 } |
|
227 for (i = 0; i < aNumThreads; i++) |
|
228 { |
|
229 threads[i].Resume(); |
|
230 } |
|
231 for (i = 0; i < aNumThreads; i++) |
|
232 { |
|
233 User::WaitForRequest(status[i]); |
|
234 test_Equal(EExitKill, threads[i].ExitType()); |
|
235 TInt exitReason = threads[i].ExitReason(); |
|
236 test_Value(exitReason, exitReason >= 0 || exitReason == KErrNoMemory); |
|
237 threads[i].Close(); |
|
238 } |
|
239 delete[] status; |
|
240 delete[] threads; |
|
241 } |
|
242 |
|
// Parameters for the TouchMemory() thread; TouchData is the single global
// instance, filled in by FragmentMemory() from its aSize/aFrequency arguments.
struct STouchData
	{
	TUint iSize;		// size in bytes of each decommitted hole in the chunk
	TUint iFrequency;	// distance in bytes between the starts of consecutive holes
	}TouchData;
|
248 |
|
249 |
|
//
// Thread entry point: continuously writes then verifies a one-byte-per-page
// pattern over the committed fragments of the global Chunk until
// TouchDataStop is set.  Assumes the chunk layout left by FragmentMemory():
// a hole of TouchData.iSize bytes at the start of every TouchData.iFrequency
// bytes, committed memory in between -- TODO(review): confirm against caller.
//
// @return KErrNone when stopped, KErrGeneral if read-back data mismatches.
//
TInt TouchMemory(TAny*)
	{
	while (!TouchDataStop)
		{
		TUint8* p = Chunk.Base();
		TUint8* pEnd = p + ChunkCommitEnd;
		TUint8* fragPEnd = p + TouchData.iFrequency;
		// fragP starts just past the first hole, i.e. at the first committed byte.
		for (TUint8* fragP = p + TouchData.iSize; fragPEnd < pEnd;)
			{
			TUint8* data = fragP;
			// Write one marker byte per page of the committed fragment.
			for (; data < fragPEnd; data += PageSize)
				{
				*data = (TUint8)(data - fragP);
				}
			// Read the markers back and verify them.
			for (data = fragP; data < fragPEnd; data += PageSize)
				{
				if (*data != (TUint8)(data - fragP))
					{
					RDebug::Printf("Error unexpected data 0x%x read from 0x%08x", *data, data);
					return KErrGeneral;
					}
				}
			// Step to the next committed fragment, skipping over the hole.
			fragP = fragPEnd + TouchData.iSize;
			fragPEnd += TouchData.iFrequency;
			}
		}
	return KErrNone;
	}
|
278 |
|
// Parameters for FragmentMemoryFunc()/UnfragmentMemoryFunc(); FragData is the
// single global instance, filled in by FragmentMemory().
struct SFragData
	{
	TUint iSize;		// size in bytes of each hole decommitted from the chunk
	TUint iFrequency;	// distance in bytes between the starts of consecutive holes
	TUint iDiscard;		// if set, fill the freed RAM with discardable paging data
	TBool iFragThread;	// set when fragmentation runs in a background thread
	}FragData;
|
286 |
|
//
// Fragment free RAM: commit pages to the global Chunk one at a time until
// RAM is exhausted, then decommit a hole of FragData.iSize bytes at the
// start of every FragData.iFrequency bytes.  If FragData.iDiscard is set and
// the paging cache size is adjustable, additionally grow the paging cache so
// the freed pages hold discardable data rather than staying truly free.
// Bails out early (without error) whenever FragThreadStop is set, as it may
// be running inside FragmentMemoryThreadFunc().
//
void FragmentMemoryFunc()
	{
	ChunkCommitEnd = 0;
	TInt r;
	// Commit page by page until Commit() fails or we're told to stop.
	while(KErrNone == (r = Chunk.Commit(ChunkCommitEnd,PageSize)) && !FragThreadStop)
		{
		ChunkCommitEnd += PageSize;
		}
	if (FragThreadStop)
		return;
	// Commit should only ever fail because RAM is exhausted.
	test_Equal(KErrNoMemory, r);
	TUint freeBlocks = 0;
	// Punch a hole of iSize bytes every iFrequency bytes.
	for (	TUint offset = 0;
			(offset + FragData.iSize) < ChunkCommitEnd;
			offset += FragData.iFrequency, freeBlocks++)
		{
		test_KErrNone(Chunk.Decommit(offset, FragData.iSize));
		}
	// Only meaningful single-threaded: with a background fragmentation thread
	// other activity can change free RAM between the decommits and this check.
	if (!FragData.iFragThread)
		test_Equal(FreeRam(), freeBlocks * FragData.iSize);

	if (FragData.iDiscard && CacheSizeAdjustable && !FragThreadStop)
		{
		// Grow the paging cache to soak up all the RAM just freed, then drop
		// the minimum back to its original value so the cache can be shrunk
		// again on demand (max stays raised at the new level).
		TUint minCacheSize = FreeRam();
		TUint maxCacheSize = minCacheSize;
		TUint currentCacheSize;
		test_KErrNone(DPTest::CacheSize(OrigMinCacheSize, OrigMaxCacheSize, currentCacheSize));
		test_KErrNone(DPTest::SetCacheSize(minCacheSize, maxCacheSize));
		test_KErrNone(DPTest::SetCacheSize(OrigMinCacheSize, maxCacheSize));
		}
	}
|
318 |
|
319 |
|
//
// Undo FragmentMemoryFunc(): restore the original paging cache limits (when
// they were changed) and decommit the whole chunk.
//
void UnfragmentMemoryFunc()
	{
	if (FragData.iDiscard && CacheSizeAdjustable)
		test_KErrNone(DPTest::SetCacheSize(OrigMinCacheSize, OrigMaxCacheSize));
	// Return value deliberately ignored: decommitting the full range is
	// best-effort cleanup and parts of it may already be decommitted.
	Chunk.Decommit(0, Chunk.MaxSize());
	}
|
326 |
|
327 |
|
328 TInt FragmentMemoryThreadFunc(TAny*) |
|
329 { |
|
330 while (!FragThreadStop) |
|
331 { |
|
332 FragmentMemoryFunc(); |
|
333 UnfragmentMemoryFunc(); |
|
334 } |
|
335 return KErrNone; |
|
336 } |
|
337 |
|
338 |
|
//
// Fragment free RAM and optionally start helper threads.
// @param aSize        Size in bytes of each hole decommitted from the chunk.
// @param aFrequency   Distance in bytes between starts of consecutive holes.
// @param aDiscard     If set, fill the freed RAM with discardable paging data.
// @param aTouchMemory If set (and not a manual test), start a thread that
//                     continually touches the committed pages.
// @param aFragThread  If set, fragment/unfragment repeatedly in a background
//                     thread instead of once synchronously.
// aTouchMemory and aFragThread are mutually exclusive.
// Pair every call with a later UnfragmentMemory() using the same flags.
//
void FragmentMemory(TUint aSize, TUint aFrequency, TBool aDiscard, TBool aTouchMemory, TBool aFragThread)
	{
	test_Value(aTouchMemory, !aTouchMemory || !aFragThread);
	// Holes must be smaller than their spacing, or everything gets decommitted.
	test_Value(aSize, aSize < aFrequency);
	FragData.iSize = aSize;
	FragData.iFrequency = aFrequency;
	FragData.iDiscard = aDiscard;
	FragData.iFragThread = aFragThread;

	// Disconnected chunk so arbitrary regions can be decommitted; unpaged so
	// committing it really consumes physical RAM.
	TChunkCreateInfo chunkInfo;
	chunkInfo.SetDisconnected(0, 0, FreeRam());
	chunkInfo.SetPaging(TChunkCreateInfo::EUnpaged);
	test_KErrNone(Chunk.Create(chunkInfo));

	if (aFragThread)
		{
		TInt r = FragThread.Create(KNullDesC, FragmentMemoryThreadFunc, KDefaultStackSize, PageSize, PageSize, NULL);
		test_KErrNone(r);
		FragThread.Logon(FragStatus);
		FragThreadStop = EFalse;
		FragThread.Resume();
		}
	else
		{
		FragmentMemoryFunc();
		}
	if (aTouchMemory && !ManualTest)
		{
		TouchData.iSize = aSize;
		TouchData.iFrequency = aFrequency;
		TInt r = TouchThread.Create(KNullDesC, TouchMemory, KDefaultStackSize, PageSize, PageSize, NULL);
		test_KErrNone(r);
		TouchThread.Logon(TouchStatus);
		TouchDataStop = EFalse;
		TouchThread.Resume();
		}
	}
|
376 |
|
377 |
|
//
// Undo a previous FragmentMemory() call made with the same flags: stop and
// reap any touch/fragmentation helper thread, restore the RAM state and
// close the global chunk.
//
void UnfragmentMemory(TBool aDiscard, TBool aTouchMemory, TBool aFragThread)
	{
	test_Value(aTouchMemory, !aTouchMemory || !aFragThread);
	if (aTouchMemory && !ManualTest)
		{
		// Ask the touch thread to exit and verify it did so cleanly.
		TouchDataStop = ETrue;
		User::WaitForRequest(TouchStatus);
		test_Equal(EExitKill, TouchThread.ExitType());
		test_KErrNone(TouchThread.ExitReason());
		CLOSE_AND_WAIT(TouchThread);
		}
	if (aFragThread)
		{
		// The fragmentation thread does its own cleanup before exiting.
		FragThreadStop = ETrue;
		User::WaitForRequest(FragStatus);
		test_Equal(EExitKill, FragThread.ExitType());
		test_KErrNone(FragThread.ExitReason());
		CLOSE_AND_WAIT(FragThread);
		}
	else
		UnfragmentMemoryFunc();
	CLOSE_AND_WAIT(Chunk);
	}
|
401 |
|
402 |
|
403 void TestFillPhysicalRam(TUint aFragSize, TUint aFragFreq, TUint aAllocSize, TUint aAllocAlign, TBool aDiscard, TBool aTouchMemory) |
|
404 { |
|
405 test.Printf(_L("TestFillPhysicalRam aFragSize 0x%x aFragFreq 0x%x aAllocSize 0x%x aAllocAlign %d dis %d touch %d\n"), |
|
406 aFragSize, aFragFreq, aAllocSize, aAllocAlign, aDiscard, aTouchMemory); |
|
407 FragmentMemory(aFragSize, aFragFreq, aDiscard, aTouchMemory, EFalse); |
|
408 SPhysAllocData allocData; |
|
409 // Only check free all ram could be allocated in manual tests as fixed pages may be fragmented. |
|
410 allocData.iCheckMaxAllocs = (ManualTest && !aTouchMemory && !aAllocAlign)? ETrue : EFalse; |
|
411 allocData.iCheckFreeRam = ETrue; |
|
412 allocData.iSize = aAllocSize; |
|
413 allocData.iAlign = aAllocAlign; |
|
414 FillPhysicalRam(&allocData); |
|
415 UnfragmentMemory(aDiscard, aTouchMemory, EFalse); |
|
416 } |
|
417 |
|
418 |
|
//
// Exercise physically contiguous allocation against a range of RAM
// fragmentation patterns, with and without concurrent memory touching and
// (when the paging cache is adjustable) with the freed RAM filled by
// discardable data.  The longer-running combinations only run in manual
// test mode.
//
void TestFragmentedAllocation()
	{
	// Test every other page free.
	TestFillPhysicalRam(PageSize, PageSize * 2, PageSize, 0, EFalse, EFalse);
	if (ManualTest)
		{
		TestFillPhysicalRam(PageSize, PageSize * 2, PageSize * 2, 0, EFalse, EFalse);
		TestFillPhysicalRam(PageSize, PageSize * 2, PageSize, 0, EFalse, ETrue);
		}
	TestFillPhysicalRam(PageSize, PageSize * 2, PageSize * 2, 0, EFalse, ETrue);
	// Test every 2 pages free.
	TestFillPhysicalRam(PageSize * 2, PageSize * 4, PageSize * 8, 0, EFalse, EFalse);
	if (ManualTest)
		TestFillPhysicalRam(PageSize * 2, PageSize * 4, PageSize * 8, 0, EFalse, ETrue);
	// Test 10 pages free then 20 pages allocated, allocate 256 pages (1MB in most cases).
	if (ManualTest)
		TestFillPhysicalRam(PageSize * 10, PageSize * 30, PageSize * 256, 0, EFalse, EFalse);
	TestFillPhysicalRam(PageSize * 10, PageSize * 30, PageSize * 256, 0, EFalse, ETrue);

	if (CacheSizeAdjustable)
		{// It is possible to adjust the cache size so test phyiscally contiguous
		// allocations discard and move pages when required.
		test.Next(_L("TestFragmentedAllocations with discardable data no true free memory"));
		// Test every other page free.
		TestFillPhysicalRam(PageSize, PageSize * 2, PageSize, 0, ETrue, EFalse);
		if (ManualTest)
			{
			TestFillPhysicalRam(PageSize, PageSize * 2, PageSize, 0, ETrue, ETrue);
			TestFillPhysicalRam(PageSize, PageSize * 2, PageSize * 2, 0, ETrue, EFalse);
			}
		TestFillPhysicalRam(PageSize, PageSize * 2, PageSize * 2, 0, ETrue, ETrue);
		// Test every 2 pages free.
		TestFillPhysicalRam(PageSize * 2, PageSize * 4, PageSize * 8, 0, ETrue, EFalse);
		if (ManualTest)
			TestFillPhysicalRam(PageSize * 2, PageSize * 4, PageSize * 8, 0, ETrue, ETrue);
		// Test 10 pages free then 20 pages allocated, allocate 256 pages (1MB in most cases).
		if (ManualTest)
			TestFillPhysicalRam(PageSize * 10, PageSize * 30, PageSize * 256, 0, ETrue, EFalse);
		TestFillPhysicalRam(PageSize * 10, PageSize * 30, PageSize * 256, 0, ETrue, ETrue);
		}
	}
|
460 |
|
461 |
121 GLDEF_C TInt E32Main() |
462 GLDEF_C TInt E32Main() |
122 // |
463 // |
123 // Test RAM allocation |
464 // Test RAM allocation |
124 // |
465 // |
125 { |
466 { |
146 TestAlignedAllocs(); |
509 TestAlignedAllocs(); |
147 |
510 |
148 test.Next(_L("TestClaimPhys")); |
511 test.Next(_L("TestClaimPhys")); |
149 TestClaimPhys(); |
512 TestClaimPhys(); |
150 |
513 |
|
514 if (memodel >= EMemModelTypeFlexible) |
|
515 { |
|
516 test.Next(_L("TestFragmentedAllocation")); |
|
517 TestFragmentedAllocation(); |
|
518 |
|
519 test.Next(_L("TestMultipleContiguousAllocations")); |
|
520 TestMultipleContiguousAllocations(20, PageSize * 16, 0); |
|
521 TestMultipleContiguousAllocations(20, PageSize * 16, PageShift + 1); |
|
522 TestMultipleContiguousAllocations(20, PageSize * 128, PageShift + 2); |
|
523 |
|
524 FragmentMemory(PageSize, PageSize * 2, EFalse, EFalse, EFalse); |
|
525 TestMultipleContiguousAllocations(20, PageSize * 128, PageShift + 2); |
|
526 UnfragmentMemory(EFalse, EFalse, EFalse); |
|
527 |
|
528 test.Next(_L("TestMultipleContiguousAllocations while accessing memory")); |
|
529 FragmentMemory(PageSize, PageSize * 2, EFalse, ETrue, EFalse); |
|
530 TestMultipleContiguousAllocations(20, PageSize * 128, PageShift + 2); |
|
531 UnfragmentMemory(EFalse, ETrue, EFalse); |
|
532 FragmentMemory(PageSize, PageSize * 2, ETrue, ETrue, EFalse); |
|
533 TestMultipleContiguousAllocations(50, PageSize * 256, PageShift + 5); |
|
534 UnfragmentMemory(ETrue, ETrue, EFalse); |
|
535 FragmentMemory(PageSize * 16, PageSize * 32, ETrue, ETrue, EFalse); |
|
536 TestMultipleContiguousAllocations(10, PageSize * 512, PageShift + 8); |
|
537 UnfragmentMemory(ETrue, ETrue, EFalse); |
|
538 FragmentMemory(PageSize * 32, PageSize * 64, ETrue, ETrue, EFalse); |
|
539 TestMultipleContiguousAllocations(10, PageSize * 1024, PageShift + 10); |
|
540 UnfragmentMemory(ETrue, ETrue, EFalse); |
|
541 |
|
542 test.Next(_L("TestMultipleContiguousAllocations with repeated movable and discardable allocations")); |
|
543 FragmentMemory(PageSize, PageSize * 2, EFalse, EFalse, ETrue); |
|
544 TestMultipleContiguousAllocations(20, PageSize * 2, PageShift); |
|
545 UnfragmentMemory(EFalse, EFalse, ETrue); |
|
546 FragmentMemory(PageSize, PageSize * 2, EFalse, EFalse, ETrue); |
|
547 TestMultipleContiguousAllocations(20, PageSize * 128, PageShift + 2); |
|
548 UnfragmentMemory(EFalse, EFalse, ETrue); |
|
549 FragmentMemory(PageSize, PageSize * 2, ETrue, EFalse, ETrue); |
|
550 TestMultipleContiguousAllocations(50, PageSize * 256, PageShift + 5); |
|
551 UnfragmentMemory(ETrue, EFalse, ETrue); |
|
552 FragmentMemory(PageSize * 16, PageSize * 32, ETrue, EFalse, ETrue); |
|
553 TestMultipleContiguousAllocations(20, PageSize * 512, PageShift + 8); |
|
554 UnfragmentMemory(ETrue, EFalse, ETrue); |
|
555 FragmentMemory(PageSize * 32, PageSize * 64, ETrue, EFalse, ETrue); |
|
556 TestMultipleContiguousAllocations(20, PageSize * 1024, PageShift + 10); |
|
557 UnfragmentMemory(ETrue, EFalse, ETrue); |
|
558 } |
|
559 |
151 Shadow.Close(); |
560 Shadow.Close(); |
|
561 test.Printf(_L("Free RAM=%08x at end of test\n"),FreeRam()); |
152 test.End(); |
562 test.End(); |
153 return(KErrNone); |
563 return(KErrNone); |
154 } |
564 } |
155 |
|