kerneltest/e32test/mmu/d_cache.cpp
     1 // Copyright (c) 2006-2009 Nokia Corporation and/or its subsidiary(-ies).
       
     2 // All rights reserved.
       
     3 // This component and the accompanying materials are made available
       
     4 // under the terms of the License "Eclipse Public License v1.0"
       
     5 // which accompanies this distribution, and is available
       
     6 // at the URL "http://www.eclipse.org/legal/epl-v10.html".
       
     7 //
       
     8 // Initial Contributors:
       
     9 // Nokia Corporation - initial contribution.
       
    10 //
       
    11 // Contributors:
       
    12 //
       
    13 // Description:
       
     14 // e32test\mmu\d_cache.cpp
       
    15 // See e32test\mmu\t_cache.cpp for details
       
    16 // 
       
    17 //
       
    18 
       
    19 #include "d_cache.h"
       
    20 #include <kernel/kern_priv.h>
       
    21 #include <kernel/cache.h>
       
    22 
       
    23 extern TUint32 GetCacheType();
       
    24 extern void TestCodeFunc();
       
    25 extern TInt TestCodeFuncSize();
       
    26 extern void DataSegmetTestFunct(void* aBase, TInt aSize);
       
    27 
       
    28 #ifdef __XSCALE_L2_CACHE__
       
    29 extern TUint32 L2CacheTypeReg();
       
    30 #endif
       
    31 
       
    32 #if defined(__CPU_MEMORY_TYPE_REMAPPING)
       
    33 extern TUint32 CtrlRegister();
       
    34 extern TUint32 PRRRRegister();
       
    35 extern TUint32 NRRRRegister();
       
    36 extern void SetPRRR(TUint32);
       
    37 extern void SetNRRR(TUint32);
       
    38 #endif
       
    39 
       
    40 
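// Signature of the test code fragments: TestCodeFunc and the generated NOP/return
// sequences are invoked through a CodeTest* pointer by LoopTestCodeFunc().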
       
    41 typedef void(CodeTest) ();
       
    42 
       
    43 class DCacheTest : public DLogicalChannelBase
       
    44 	{
       
    45 public:
       
    46 	DCacheTest();
       
    47 	~DCacheTest();
       
    48 protected:
       
    49 	virtual TInt DoCreate(TInt aUnit, const TDesC8* anInfo, const TVersion& aVer);
       
    50 	virtual TInt Request(TInt aFunction, TAny* a1, TAny* a2);
       
    51 private:
       
    52 	TInt GetCacheInfo(TAny* a1);
       
    53 	TInt TestDataChunk(TAny* a1);
       
    54 	TInt TestCodeChunk(TAny* a1);
       
    55 	TInt TestWriteBackMode(TAny* a1, TBool aWriteAlloc);
       
    56 	TInt TestL2Maintenance();
       
    57 	TInt GetThreshold(TAny* a1);
       
    58 	TInt SetThreshold(TAny* a1);
       
    59 	TInt TestUseCase(TAny* a1);
       
    60 	void LoopTestCodeFunc(CodeTest* f);
       
    61 
       
    62 
       
    63 	void GetExternalCacheInfo(RCacheTestDevice::TCacheInfo& info);
       
    64 
       
    65 	void CheckRemapping(RCacheTestDevice::TCacheInfo& info);
       
    66 	void Remap(RCacheTestDevice::TCacheAttr aCacheAttr);
       
    67 
       
    68 	TInt UseCase_ReadFromChunk(RCacheTestDevice::TChunkTest& info);
       
    69 	TInt UseCase_ReadFromChunk_ReadFromHeap(RCacheTestDevice::TChunkTest& info);
       
    70 	TInt UseCase_WriteToChunk(RCacheTestDevice::TChunkTest& info);
       
    71 	TInt UseCase_WriteToChunk_ReadFromHeap(RCacheTestDevice::TChunkTest& info);
       
    72 
       
    73 
       
    74 	//Phys. memory and shared chunk alloc/dealloc primitives
       
    75 	TInt AllocPhysicalRam(TInt aSize);
       
    76 	void FreePhysicalRam();
       
    77 	TInt CreateSharedChunk(TInt aMapAttr, TUint32& aActualMapAttr);
       
    78 	void CloseSharedChunk();
       
    79 
       
    80 private:
       
    81 	DChunk* 	iSharedChunk;	// Shared chunk used in the test
       
    82 	TPhysAddr 	iPhysAddr;		// Physical address of the allocated memory assigned to the chunk
       
    83 	TUint 		iSize;			// The size of the allocated memory.
       
    84 	TLinAddr 	iChunkBase;		// Base linear address of the shared chunk.
       
    85 
       
    86 	TInt* iHeap1;
       
    87 	TInt* iHeap2;
       
    88 	TUint32 iDummy;
       
    89 	};
       
    90 
       
    91 DCacheTest* CacheTestDriver;
       
    92 
       
    93 DCacheTest::DCacheTest() 	{}
       
    94 
       
    95 DCacheTest::~DCacheTest()	{CacheTestDriver = NULL;}
       
    96 
       
    97 /**Creates the channel*/
       
    98 TInt DCacheTest::DoCreate(TInt /*aUnit*/, const TDesC8* /*anInfo*/, const TVersion& /*aVer*/) {return KErrNone;}
       
    99 
       
   100 /** Allocates physical memory and sets iPhysAddr & iSize accordingly.*/
       
   101 TInt DCacheTest::AllocPhysicalRam(TInt aSize)
       
   102 	{
       
   103 	iSize = aSize;
       
   104 	NKern::ThreadEnterCS();
       
   105 	TInt r = Epoc::AllocPhysicalRam(aSize, iPhysAddr, 0); //Allocate physical RAM. This will set iPhysAddr
       
   106 	NKern::ThreadLeaveCS();
       
   107 	return r;
       
   108 	}
       
   109 
       
   110 /** Frees physical memory.*/
       
   111 void DCacheTest::FreePhysicalRam()
       
   112 	{
       
   113 	NKern::ThreadEnterCS();
       
   114 	Epoc::FreePhysicalRam(iPhysAddr, iSize);
       
   115 	NKern::ThreadLeaveCS();
       
   116 	}
       
   117 
       
   118 /**
       
    119 Creates a shared chunk over the allocated physical memory and sets iChunkBase accordingly.
       
   120 @pre Physical memory is allocated (iPhysAddr & iSize are set accordingly).
       
   121 */
       
   122 TInt DCacheTest::CreateSharedChunk(TInt aMapAttr, TUint32& aActualMapAttr)
       
   123 	{
       
   124 	TInt r;
       
   125     TChunkCreateInfo chunkInfo;
       
   126     chunkInfo.iType         = TChunkCreateInfo::ESharedKernelSingle;
       
   127     chunkInfo.iMaxSize      = iSize;
       
   128     chunkInfo.iMapAttr      = aMapAttr;
       
   129     chunkInfo.iOwnsMemory   = EFalse;
       
   130     chunkInfo.iDestroyedDfc = NULL;
       
   131 
       
   132 	NKern::ThreadEnterCS();
       
   133     if (KErrNone != (r = Kern::ChunkCreate(chunkInfo, iSharedChunk, iChunkBase, aActualMapAttr)))
       
   134 		{
       
   135 		FreePhysicalRam();
       
   136 		NKern::ThreadLeaveCS();
       
   137 		return r;
       
   138 		}
       
   139 	r = Kern::ChunkCommitPhysical(iSharedChunk,0,iSize, iPhysAddr);
       
   140     if(r!=KErrNone)
       
   141         {
       
   142 		CloseSharedChunk();
       
   143 		FreePhysicalRam();
       
   144 		NKern::ThreadLeaveCS();
       
   145 		return r;
       
   146 		}
       
   147 	NKern::ThreadLeaveCS();
       
   148 	return KErrNone;
       
   149 	}
       
   150 
       
   151 /** Closes shared chunk.*/
       
   152 void DCacheTest::CloseSharedChunk()
       
   153 	{
       
   154 	NKern::ThreadEnterCS();
       
   155 	Kern::ChunkClose(iSharedChunk);
       
   156 	Kern::HalFunction(EHalGroupKernel, EKernelHalSupervisorBarrier, 0, 0);	// make sure async close has happened
       
   157 	NKern::ThreadLeaveCS();
       
   158 	}
       
   159 
       
   160 
       
   161 #if defined(__CPU_ARMV7)
       
   162 extern TUint32 CacheTypeRegister();
       
   163 extern TUint32 CacheLevelIDRegister();
       
   164 extern TUint32 CacheSizeIdRegister(TUint32 aType/*0-1*/, TUint32 aLevel/*0-7*/);
       
   165 
       
   166 void ParseCacheLevelInfo(TInt aCacheSizeIDReg, RCacheTestDevice::TCacheSingle& aCS)
       
   167 	{
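	// CCSIDR layout (ARMv7): (NumSets-1) in bits[27:13], (Associativity-1) in bits[12:3],
	// (log2(words per line)-2) in bits[2:0]; hence the +1 and +4 adjustments below.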
       
   168 	aCS.iSets = ((aCacheSizeIDReg>>13)& 0x7fff)+1;
       
   169 	aCS.iWays =   ((aCacheSizeIDReg>>3)& 0x3ff)+1;
       
    170 	aCS.iLineSize = 1<<((aCacheSizeIDReg & 0x7)+4);	// field is log2(words per line)-2: +2 for that, +2 more to count in bytes
       
   171 	aCS.iSize = aCS.iSets * aCS.iWays * aCS.iLineSize;
       
   172 	}
       
   173 #endif
       
   174 
       
   175 
       
   176 void AppendTo(TDes8& aDes, const char* aFmt, ...)
       
   177 	{
       
   178 	VA_LIST list;
       
   179 	VA_START(list,aFmt);
       
   180 	Kern::AppendFormat(aDes,aFmt,list);
       
   181 	}
       
   182 
       
   183 /** Checks Memory Remap settings (both memory type and access permission remapping).*/
       
   184 void DCacheTest::CheckRemapping(RCacheTestDevice::TCacheInfo& info)
       
   185 	{
       
   186 #if defined(__CPU_MEMORY_TYPE_REMAPPING)
       
   187 	TUint32 cr = CtrlRegister();
       
   188 	TUint32 prrr =PRRRRegister();
       
   189 	TUint32 nrrr =NRRRRegister();
       
   190 	AppendTo(info.iDesc,"Memory Remapping: CtrlReg:%xH, PRRR:%xH NRRR:%xH\n", cr, prrr, nrrr);
       
   191 
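	// SCTLR.TRE (bit 28) enables TEX remap and SCTLR.AFE (bit 29) the simplified access model;
	// the test expects both bits (0x30000000) to be set for remapping to be considered ON.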
       
   192 	if ( (cr&0x30000000) == 0x30000000)
       
   193 		info.iMemoryRemapping = 1;
       
   194 	else
       
   195 		AppendTo(info.iDesc,"Error:Memory Remapping is OFF \n");
       
   196 #endif
       
   197 	}
       
   198 
       
    199 // Reprograms the EMemAttKernelInternal4 remap entry (PRRR/NRRR) to the memory type given by aCacheAttr
       
   200 void DCacheTest::Remap(RCacheTestDevice::TCacheAttr aCacheAttr)
       
   201 	{
       
   202 #if defined(__CPU_MEMORY_TYPE_REMAPPING)
       
   203 	TInt inner, outer;
       
   204 	switch(aCacheAttr)
       
   205 		{
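		// Inner/outer values use the PRRR/NRRR 2-bit cache encoding:
		// 0=non-cacheable, 1=write-back write-allocate, 2=write-through, 3=write-back no write-allocate.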
       
   206 		case RCacheTestDevice::E_InnerWT_Remapped: 	inner=2;outer=0;break;
       
   207 		case RCacheTestDevice::E_InnerWBRA_Remapped:inner=3;outer=0;break;
       
   208 		case RCacheTestDevice::E_InnerWB_Remapped:	inner=1;outer=0;break;
       
   209 		case RCacheTestDevice::E_OuterWT_Remapped:	inner=0;outer=2;break;
       
   210 		case RCacheTestDevice::E_OuterWBRA_Remapped:inner=0;outer=3;break;
       
   211 		case RCacheTestDevice::E_OuterWB_Remapped:	inner=0;outer=1;break;
       
   212 		case RCacheTestDevice::E_InOutWT_Remapped:	inner=2;outer=2;break;
       
   213 		case RCacheTestDevice::E_InOutWBRA_Remapped:inner=3;outer=3;break;
       
   214 		case RCacheTestDevice::E_InOutWB_Remapped:	inner=1;outer=1;break;
       
   215 		default:Kern::PanicCurrentThread(_L("d_cache driver error"),0);return;
       
   216 		}
       
   217 
       
   218 	TUint32 prrr =PRRRRegister();
       
   219 	TUint32 nrrr =NRRRRegister();
       
   220 	prrr &= ~(3<<8);	// Clear EMemAttKernelInternal4 setting for memory type
       
   221 	nrrr &= ~(3<<8); 	// Clear EMemAttKernelInternal4 setting for normal memory type, inner cache
       
   222 	nrrr &= ~(3<<24);	// Clear EMemAttKernelInternal4 setting for normal memory type, outer cache
       
   223 	prrr |= 2 <<8; 		// Set EMemAttKernelInternal4 as normal memory
       
   224 	nrrr |= inner <<8;	// Set inner cache for EMemAttKernelInternal4 
       
   225 	nrrr |= outer << 24;// Set outer cache for EMemAttKernelInternal4 
       
   226 
       
   227 	SetPRRR(prrr);
       
   228 	SetNRRR(nrrr);
       
   229 #endif
       
   230 	}
       
   231 
       
   232 
       
   233 
       
   234 /** Fills in info structure with external cache parameters. */
       
   235 void DCacheTest::GetExternalCacheInfo(RCacheTestDevice::TCacheInfo& info)
       
   236 	{
       
   237 #if defined(__HAS_EXTERNAL_CACHE__)
       
   238 	info.iOuterCache=1;
       
   239 
       
   240 #if defined(__ARM_L210_CACHE__)
       
   241 	AppendTo(info.iDesc,"Built as L210 Cache;\n");
       
   242 #elif defined(__ARM_L220_CACHE__)
       
   243 	AppendTo(info.iDesc,"Built as L220 Cache:\n");
       
   244 #elif defined(__ARM_PL310_CACHE__)
       
   245 	AppendTo(info.iDesc,"Built as PL310 Cache:\n");
       
   246 #endif
       
   247 
       
   248 	TInt cacheController = Kern::SuperPage().iArmL2CacheBase;
       
   249 	if (!cacheController)
       
   250 		{
       
   251 		AppendTo(info.iDesc,"Warning:No CCB Address in Super Page?\n");
       
   252 		return;
       
   253 		}
       
   254 		
       
   255 	TInt rawData = *(TInt*)(cacheController);   //reg 0 in controller is Cache ID Register
       
   256 	AppendTo(info.iDesc,"L2 ID Reg:%xH\n", rawData);
       
   257 
       
   258 	rawData = *(TInt*)(cacheController+4); //reg 4 in controller is Cache Type Register
       
   259 	AppendTo(info.iDesc,"L2 Type Reg:%xH\n", rawData);
       
   260 
       
   261 	RCacheTestDevice::TCacheSingle& cs = info.iCache[info.iCacheCount];
       
   262 
       
   263 	cs.iLineSize=32; //always
       
   264 #if defined(__ARM_L210_CACHE__) || defined(__ARM_L220_CACHE__)
       
   265 	cs.iWays = (rawData>>3)&0x0f;	if (cs.iWays > 8) cs.iWays = 8;
       
   266 #elif defined(__ARM_PL310_CACHE__)
       
   267 	cs.iWays = (rawData&0x40) ? 16:8;
       
   268 #endif
       
   269 	TInt waySize;
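	// Decode the way size from bits [10:8] of the type register read above; each step doubles
	// the size starting at 16KB (the top encodings differ between L210/L220 and PL310).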
       
   270 	switch((rawData>>8)&7)
       
   271 		{
       
   272 		case 0:		waySize = 0x4000;  break;
       
   273 		case 1:		waySize = 0x4000;  break;
       
   274 		case 2:		waySize = 0x8000;  break;
       
   275 		case 3:		waySize = 0x10000; break;
       
   276 		case 4:		waySize = 0x20000; break;
       
   277 #if defined(__ARM_L210_CACHE__) || defined(__ARM_L220_CACHE__)
       
   278 		default:	waySize = 0x40000; break;
       
   279 #elif defined(__ARM_PL310_CACHE__)
       
   280 		case 5:		waySize = 0x40000; break;
       
   281 		default:	waySize = 0x80000; break;
       
   282 #endif
       
   283 		}
       
   284 	cs.iSize = waySize * cs.iWays;
       
   285 	cs.iSets = waySize >> 5; // = waySize / lineLen 
       
   286 
       
   287 
       
   288 	cs.iLevel = 2;
       
   289 	cs.iCode = 1;
       
   290 	cs.iData = 1;
       
   291 	cs.iDesc.SetLength(0);
       
   292 	AppendTo(cs.iDesc,"Outer Unified PAPT");
       
   293 
       
   294 	info.iMaxCacheSize = Max(info.iMaxCacheSize, cs.iSize);
       
   295 	info.iCacheCount++;
       
   296 #endif //defined(__HAS_EXTERNAL_CACHE__)
       
   297 	}
       
   298 
       
   299 
       
   300 /** Passes cache configuration parameters to the user side*/
       
   301 TInt DCacheTest::GetCacheInfo(TAny* a1)
       
   302 	{
       
   303 	TInt ret = KErrNone;
       
   304 	RCacheTestDevice::TCacheInfo info;
       
   305 
       
   306 	info.iDesc.SetLength(0);
       
   307 	info.iCacheCount=0;
       
   308 	info.iMaxCacheSize=0;
       
   309 	info.iMemoryRemapping=0;
       
   310 	info.iOuterCache=0;
       
   311 
       
   312 ////////////////////////
       
   313 #if defined(__CPU_ARMV7)
       
   314 ////////////////////////
       
   315 	info.iOuterCache=1;
       
   316 
       
   317 	TUint32 ctr=CacheTypeRegister();
       
   318 	TUint32 clr=CacheLevelIDRegister();
       
    319 	TInt LoC = (clr>>24)&7;	//Number of cache levels to be cleaned/invalidated to the Point of Coherency

    320 	TInt LoU = (clr>>27)&7;	//Number of cache levels to be cleaned/invalidated to the Point of Unification
       
   321 	AppendTo(info.iDesc,"ARMv7 cache - CTR:%xH CLR:%xH LoC:%d LoU:%d\n", ctr, clr, LoC, LoU);
       
   322 	
       
   323 	RCacheTestDevice::TCacheSingle* cs = &info.iCache[info.iCacheCount];
       
   324 	TInt level;
       
   325 	for (level=0;level<LoC;level++)
       
   326 		{
       
   327 		TInt type = (clr >> (level*3)) & 7; //000:NoCache 001:ICache 010:DCache 011:Both 100:Unified
       
   328 		
       
    329 		if (type==0)		// No cache at this level; assume none at any further level either
       
   330 			break;
       
   331 		
       
   332 		if(type & 1) 	// Instruction Cache
       
   333 			{
       
   334 			TInt csr = CacheSizeIdRegister(1,level);
       
   335 			ParseCacheLevelInfo(csr, *cs);
       
   336 			cs->iLevel = level+1;
       
   337 			cs->iCode = 1;
       
   338 			cs->iData = 0;
       
   339 			AppendTo(cs->iDesc,"ICache CSR:%xH",csr);
       
   340 			info.iMaxCacheSize = Max(info.iMaxCacheSize, cs->iSize);
       
   341 			cs = &info.iCache[++info.iCacheCount];
       
   342 			}
       
   343 			
       
   344 		if(type & 2) 	// Data Cache
       
   345 			{
       
   346 			TInt csr = CacheSizeIdRegister(0,level);
       
   347 			ParseCacheLevelInfo(csr, *cs);
       
   348 			cs->iLevel = level+1;
       
   349 			cs->iCode = 0;
       
   350 			cs->iData = 1;
       
   351 			AppendTo(cs->iDesc,"DCache CSR:%xH",csr);
       
   352 			info.iMaxCacheSize = Max(info.iMaxCacheSize, cs->iSize);
       
   353 			cs = &info.iCache[++info.iCacheCount];
       
   354 			}
       
   355 
       
   356 		if(type & 4) 	// Unified Cache
       
   357 			{
       
   358 			TInt csr = CacheSizeIdRegister(0,level);
       
   359 			ParseCacheLevelInfo(csr, *cs);
       
   360 			cs->iLevel = level+1;
       
   361 			cs->iCode = 1;
       
   362 			cs->iData = 1;
       
   363 			AppendTo(cs->iDesc,"Unified CSR:%xH",csr);
       
   364 			info.iMaxCacheSize = Max(info.iMaxCacheSize, cs->iSize);
       
   365 			cs = &info.iCache[++info.iCacheCount];
       
   366 			}
       
   367 		}
       
   368 
       
   369 ///////////////////////////////////
       
   370 #elif defined(__CPU_HAS_CACHE_TYPE_REGISTER)
       
   371 ///////////////////////////////////
       
   372 
       
   373 	TInt rawData=GetCacheType();
       
   374 	TInt splitCache=rawData&0x01000000;
       
   375 	AppendTo(info.iDesc,"L1 Cache TypeReg=%xH\n", rawData);
       
   376 
       
   377 	//Cache #1	
       
    378 	TUint32 s=(rawData>>12)&0xfff;  		//s = P[11]:0:size[9:6]:assoc[5:3]:M[2]:len[1:0]
       
   379 	info.iCache[info.iCacheCount].iLineSize = 1 << ((s&2) + 3); 							//1<<(len+3)
       
   380 	info.iCache[info.iCacheCount].iWays = (2 + ((s>>2)&1)) << (((s>>3)&0x7) - 1);			//(2+M) << (assoc-1)
       
   381 	info.iCache[info.iCacheCount].iSize = (2 + ((s>>2)&1)) << (((s>>6)&0xf) + 8);			//(2+M) << (size+8)
       
    382 	info.iCache[info.iCacheCount].iSets = 1 << (((s>>6)&0xf) + 6 - ((s>>3)&0x7) - (s&2));	//1 << (size + 6 - assoc - len)
       
   383 	info.iCache[info.iCacheCount].iData = 1;
       
   384 	info.iCache[info.iCacheCount].iLevel = 1;
       
   385 
       
   386 	if (splitCache)
       
   387 		{
       
   388 		info.iCache[info.iCacheCount].iCode = 0;
       
   389 		info.iCache[info.iCacheCount].iDesc.SetLength(0);
       
   390 		AppendTo(info.iCache[info.iCacheCount].iDesc,"Inner DCache");
       
   391 
       
   392 		#if defined(__CPU_ARMV6)
       
   393 		AppendTo(info.iCache[info.iCacheCount].iDesc," VAPT");
       
   394 		#else
       
   395 		AppendTo(info.iCache[info.iCacheCount].iDesc," VAVT");
       
   396 		#endif		
       
   397 		info.iMaxCacheSize = Max(info.iMaxCacheSize, info.iCache[info.iCacheCount].iSize);
       
   398 		info.iCacheCount++;
       
   399 
       
   400 		// Cache #2
       
    401 		s=rawData&0xfff;  		//s = P[11]:0:size[9:6]:assoc[5:3]:M[2]:len[1:0]
       
   402 		info.iCache[info.iCacheCount].iLineSize = 1 << ((s&2) + 3); 							//1<<(len+3)
       
   403 		info.iCache[info.iCacheCount].iWays = (2 + ((s>>2)&1)) << (((s>>3)&0x7) - 1);			//(2+M) << (assoc-1)
       
   404 		info.iCache[info.iCacheCount].iSize = (2 + ((s>>2)&1)) << (((s>>6)&0xf) + 8);			//(2+M) << (size+8)
       
    405 		info.iCache[info.iCacheCount].iSets = 1 << (((s>>6)&0xf) + 6 - ((s>>3)&0x7) - (s&2));	//1 << (size + 6 - assoc - len)
       
   406 		info.iCache[info.iCacheCount].iLevel = 1;
       
   407 		info.iCache[info.iCacheCount].iCode = 1;
       
   408 		info.iCache[info.iCacheCount].iData = 0;
       
   409 		info.iCache[info.iCacheCount].iDesc.SetLength(0);
       
   410 		AppendTo(info.iCache[info.iCacheCount].iDesc,"Inner ICache");
       
   411 		#if defined(__CPU_ARMV6)
       
   412 		AppendTo(info.iCache[info.iCacheCount].iDesc," VAPT");
       
   413 		#else
       
   414 		AppendTo(info.iCache[info.iCacheCount].iDesc," VAVT");
       
   415 		#endif		
       
   416 		}
       
   417 	else
       
   418 	{
       
   419 		info.iCache[info.iCacheCount].iCode = 1;
       
   420 		info.iCache[info.iCacheCount].iDesc.SetLength(0);
       
   421 		AppendTo(info.iCache[info.iCacheCount].iDesc,"Inner Unified");
       
   422 		#if defined(__CPU_ARMV6)
       
   423 		AppendTo(info.iCache[info.iCacheCount].iDesc," VAPT");
       
   424 		#else
       
   425 		AppendTo(info.iCache[info.iCacheCount].iDesc," VAVT");
       
   426 		#endif		
       
   427 	}		
       
   428 	info.iMaxCacheSize = Max(info.iMaxCacheSize, info.iCache[info.iCacheCount].iSize);
       
   429 	info.iCacheCount++;
       
   430 
       
   431 /////
       
   432 #else
       
   433 /////
       
   434 
       
   435 	ret = KErrNotSupported;
       
   436 
       
   437 #endif
       
   438 
       
    439 	GetExternalCacheInfo(info); // Get L210/L220/PL310 outer cache info
       
   440 	CheckRemapping(info);		// Get memory remapping info
       
   441 
       
   442 	info.iDmaBufferAlignment = Cache::DmaBufferAlignment();
       
   443 	kumemput(a1,&info,sizeof(info));
       
   444 	return ret;
       
   445 	}
       
   446 
       
   447 /** Get cache thresholds.*/
       
   448 TInt DCacheTest::GetThreshold(TAny* a1)
       
   449 	{
       
   450 	RCacheTestDevice::TThresholdInfo info;
       
   451 	kumemget(&info,a1,sizeof(info));
       
   452 
       
   453 	TCacheThresholds thresholds;
       
   454 	TInt r = Cache::GetThresholds(thresholds, info.iCacheType);
       
   455 	if (r==KErrNone)
       
   456 		{
       
   457 		info.iPurge = thresholds.iPurge;	
       
   458 		info.iClean = thresholds.iClean;	
       
   459 		info.iFlush = thresholds.iFlush;	
       
   460 		kumemput(a1,&info,sizeof(info));
       
   461 		}
       
   462 	return r;
       
   463 	}
       
   464 
       
   465 /** Set cache thresholds.*/
       
   466 TInt DCacheTest::SetThreshold(TAny* a1)
       
   467 	{
       
   468 	RCacheTestDevice::TThresholdInfo info;
       
   469 	kumemget(&info,a1,sizeof(info));
       
   470 
       
   471 	TCacheThresholds thresholds;
       
   472 	thresholds.iPurge = info.iPurge;
       
   473 	thresholds.iClean = info.iClean;
       
   474 	thresholds.iFlush = info.iFlush;
       
   475 	return Cache::SetThresholds(thresholds, info.iCacheType);
       
   476 	}
       
   477 
       
    478 // Runs DataSegmetTestFunct against data held in a chunk.

    479 // The chunk's cache attributes and size are specified in the input arguments.

    480 // Measures and returns the time spent.
       
   481 TInt DCacheTest::TestDataChunk(TAny* a1)
       
   482 	{
       
   483 	TInt r = KErrNone;
       
   484 	TInt time;
       
   485 	
       
   486 	RCacheTestDevice::TChunkTest info;
       
   487 	kumemget(&info,a1,sizeof(info));
       
   488 
       
   489 
       
   490 	TUint32 chunkAttr = EMapAttrSupRw;
       
   491 	if (info.iShared) chunkAttr |= EMapAttrShared;
       
   492 #ifdef __SMP__
       
   493 	TUint32 force_shared = EMapAttrShared;
       
   494 #else
       
   495 	TUint32 force_shared = 0;
       
   496 #endif
       
   497 
       
   498 	switch (info.iCacheAttr)
       
   499 		{
       
   500 
       
   501 		case RCacheTestDevice::E_FullyBlocking:	chunkAttr |= EMapAttrFullyBlocking; break;
       
   502 		case RCacheTestDevice::E_Buffered_NC:	chunkAttr |= EMapAttrBufferedNC; break;
       
   503 		case RCacheTestDevice::E_Buffered_C:	chunkAttr |= EMapAttrBufferedC; break;
       
   504 
       
   505 		case RCacheTestDevice::E_InnerWT:		chunkAttr |= EMapAttrCachedWTRA|force_shared; break;
       
   506 		case RCacheTestDevice::E_InnerWBRA:		chunkAttr |= EMapAttrCachedWBRA|force_shared; break;
       
   507 		case RCacheTestDevice::E_InnerWB:		chunkAttr |= EMapAttrCachedWBWA|force_shared; break;
       
   508 
       
   509 		case RCacheTestDevice::E_OuterWT:		chunkAttr |= EMapAttrL2CachedWTRA; break;
       
   510 		case RCacheTestDevice::E_OuterWBRA:		chunkAttr |= EMapAttrL2CachedWBRA; break;
       
   511 		case RCacheTestDevice::E_OuterWB:		chunkAttr |= EMapAttrL2CachedWBWA; break;
       
   512 
       
   513 		case RCacheTestDevice::E_InOutWT:		chunkAttr |= EMapAttrCachedWTRA|EMapAttrL2CachedWTRA|force_shared; break;
       
   514 		case RCacheTestDevice::E_InOutWBRA:		chunkAttr |= EMapAttrCachedWBRA|EMapAttrL2CachedWBRA|force_shared; break;
       
   515 		case RCacheTestDevice::E_InOutWB:		chunkAttr |= EMapAttrCachedWBWA|EMapAttrL2CachedWBWA|force_shared; break;
       
   516 
       
   517 		case RCacheTestDevice::E_StronglyOrder:
       
   518 			new (&chunkAttr) TMappingAttributes2(EMemAttStronglyOrdered,EFalse,ETrue,EFalse,info.iShared?ETrue:EFalse);
       
   519 			break;
       
   520 		case RCacheTestDevice::E_Device:
       
   521 			new (&chunkAttr) TMappingAttributes2(EMemAttDevice,EFalse,ETrue,EFalse,info.iShared?ETrue:EFalse);
       
   522 			break;
       
   523 		case RCacheTestDevice::E_Normal_Uncached:
       
   524 			new (&chunkAttr) TMappingAttributes2(EMemAttNormalUncached,EFalse,ETrue,EFalse,info.iShared?ETrue:EFalse);
       
   525 			break;
       
   526 		case RCacheTestDevice::E_Normal_Cached:
       
   527 			new (&chunkAttr) TMappingAttributes2(EMemAttNormalCached,EFalse,ETrue,EFalse,(info.iShared|force_shared)?ETrue:EFalse);
       
   528 			break;
       
   529 		case RCacheTestDevice::E_KernelInternal4:
       
   530 			new (&chunkAttr) TMappingAttributes2(EMemAttKernelInternal4,EFalse,ETrue,ETrue,(info.iShared|force_shared)?ETrue:EFalse);
       
   531 			break;
       
   532 		case RCacheTestDevice::E_PlatformSpecific5:
       
   533 			new (&chunkAttr) TMappingAttributes2(EMemAttPlatformSpecific5,EFalse,ETrue,ETrue,(info.iShared|force_shared)?ETrue:EFalse);
       
   534 			break;
       
   535 		case RCacheTestDevice::E_PlatformSpecific6:
       
   536 			new (&chunkAttr) TMappingAttributes2(EMemAttPlatformSpecific6,EFalse,ETrue,ETrue,(info.iShared|force_shared)?ETrue:EFalse);
       
   537 			break;
       
   538 		case RCacheTestDevice::E_PlatformSpecific7:
       
   539 			new (&chunkAttr) TMappingAttributes2(EMemAttPlatformSpecific7,EFalse,ETrue,ETrue,(info.iShared|force_shared)?ETrue:EFalse);
       
   540 			break;
       
   541 
       
   542 #if defined(__CPU_MEMORY_TYPE_REMAPPING)
       
   543 		case RCacheTestDevice::E_InnerWT_Remapped:
       
   544 		case RCacheTestDevice::E_InnerWBRA_Remapped:
       
   545 		case RCacheTestDevice::E_InnerWB_Remapped:
       
   546 		case RCacheTestDevice::E_InOutWT_Remapped:
       
   547 		case RCacheTestDevice::E_InOutWBRA_Remapped:
       
   548 		case RCacheTestDevice::E_InOutWB_Remapped:
       
   549 #ifdef __SMP__
       
   550 			info.iShared = ETrue;
       
   551 #endif
       
   552 		case RCacheTestDevice::E_OuterWT_Remapped:
       
   553 		case RCacheTestDevice::E_OuterWBRA_Remapped:
       
   554 		case RCacheTestDevice::E_OuterWB_Remapped:
       
   555 			Remap(info.iCacheAttr);
       
   556 			new (&chunkAttr) TMappingAttributes2(EMemAttKernelInternal4,EFalse,ETrue,EFalse,info.iShared?ETrue:EFalse);
       
   557 			break;
       
   558 #endif
       
   559 			
       
   560 		case RCacheTestDevice::E_Default:
       
   561 			{
       
   562 			// Run the test against memory from kernel heap (no need for extra memory chunks)
       
   563 			NKern::ThreadEnterCS();
       
   564 			TLinAddr bufferBase = (TLinAddr)Kern::Alloc(info.iSize);
       
   565 			NKern::ThreadLeaveCS();
       
   566 			if (!bufferBase)
       
   567 					return KErrNoMemory;
       
   568 		
       
    569 			//Don't purge (invalidate) the heap allocation here, as that could destroy other data sharing the same cache lines.
       
   570 			//Cache::SyncMemoryAfterDmaRead((TLinAddr)bufferBase, info.iSize);
       
   571 
       
   572 			// Execute the test
       
   573 			time = NKern::TickCount();
       
   574 			DataSegmetTestFunct((void*)bufferBase, info.iSize);
       
   575 			info.iTime = NKern::TickCount() - time;
       
   576 			info.iActualMapAttr = 0;
       
   577 			kumemput(a1,&info,sizeof(info));
       
   578 
       
   579 			NKern::ThreadEnterCS();
       
   580 			Kern::Free((TAny*)bufferBase);
       
   581 			NKern::ThreadLeaveCS();
       
   582 
       
   583 			return KErrNone;
       
   584 			}
       
   585 		default:
       
   586 			return KErrArgument;		
       
   587 		}
       
   588 
       
    589 	// Run the test against a chunk with cache attributes as specified in info.iCacheAttr.
       
   590 	if (KErrNone!=(r=AllocPhysicalRam(Kern::RoundToPageSize(info.iSize)))) return r;
       
   591 	if (KErrNone!=(r=CreateSharedChunk(chunkAttr, info.iActualMapAttr))) return r;
       
   592 	
       
   593 	Cache::SyncMemoryAfterDmaRead(iChunkBase, info.iSize); // Invalidate (aka purge) cache.
       
   594 
       
   595 	time = NKern::TickCount();
       
   596 	DataSegmetTestFunct((void*)iChunkBase, info.iSize);
       
   597 	info.iTime = NKern::TickCount() - time;
       
   598 
       
   599 	CloseSharedChunk();
       
   600 	FreePhysicalRam();
       
   601 
       
   602 	kumemput(a1,&info,sizeof(info));
       
   603 	return KErrNone;
       
   604 	}
       
   605 
       
   606 void DCacheTest::LoopTestCodeFunc(CodeTest* f)
       
   607 	{
       
   608 	for (TInt x = 0;x<5000;x++)
       
   609 		(*f)();
       
   610 	}
       
   611 
       
    612 // Runs TestCodeFunc (a sequence of NOPs ending in a return) from a chunk.

    613 // The chunk's cache attributes and the size of the function are specified in the input arguments.

    614 // Measures and returns the time spent.
       
   615 TInt DCacheTest::TestCodeChunk(TAny* a1)
       
   616 	{
       
   617 	TInt r = KErrNone;
       
   618 	TInt time;
       
   619 	
       
   620 	RCacheTestDevice::TChunkTest info;
       
   621 	kumemget(&info,a1,sizeof(info));
       
   622 
       
   623 
       
   624 	info.iActualMapAttr = EMapAttrSupRwx;
       
   625 	if (info.iShared) info.iActualMapAttr |= EMapAttrShared;
       
   626 #ifdef __SMP__
       
   627 	TUint32 force_shared = EMapAttrShared;
       
   628 #else
       
   629 	TUint32 force_shared = 0;
       
   630 #endif
       
   631 
       
   632 	switch (info.iCacheAttr)
       
   633 		{
       
   634 		case RCacheTestDevice::E_FullyBlocking:	info.iActualMapAttr |= EMapAttrFullyBlocking; break;
       
   635 		case RCacheTestDevice::E_Buffered_NC:	info.iActualMapAttr |= EMapAttrBufferedNC; break;
       
   636 		case RCacheTestDevice::E_Buffered_C:	info.iActualMapAttr |= EMapAttrBufferedC; break;
       
   637 
       
   638 		case RCacheTestDevice::E_InnerWT:		info.iActualMapAttr |= EMapAttrCachedWTRA|force_shared; break;
       
   639 		case RCacheTestDevice::E_InnerWBRA:		info.iActualMapAttr |= EMapAttrCachedWBRA|force_shared; break;
       
   640 		case RCacheTestDevice::E_InnerWB:		info.iActualMapAttr |= EMapAttrCachedWBWA|force_shared; break;
       
   641 
       
   642 		case RCacheTestDevice::E_OuterWT:		info.iActualMapAttr |= EMapAttrL2CachedWTRA; break;
       
   643 		case RCacheTestDevice::E_OuterWBRA:		info.iActualMapAttr |= EMapAttrL2CachedWBRA; break;
       
   644 		case RCacheTestDevice::E_OuterWB:		info.iActualMapAttr |= EMapAttrL2CachedWBWA; break;
       
   645 
       
   646 		case RCacheTestDevice::E_InOutWT:		info.iActualMapAttr |= EMapAttrCachedWTRA|EMapAttrL2CachedWTRA|force_shared; break;
       
   647 		case RCacheTestDevice::E_InOutWBRA:		info.iActualMapAttr |= EMapAttrCachedWBRA|EMapAttrL2CachedWBRA|force_shared; break;
       
   648 		case RCacheTestDevice::E_InOutWB:		info.iActualMapAttr |= EMapAttrCachedWBWA|EMapAttrL2CachedWBWA|force_shared; break;
       
   649 
       
   650 		case RCacheTestDevice::E_StronglyOrder:
       
   651 			new (&info.iActualMapAttr) TMappingAttributes2(EMemAttStronglyOrdered,EFalse,ETrue,ETrue,info.iShared?ETrue:EFalse);
       
   652 			break;
       
   653 		case RCacheTestDevice::E_Device:
       
   654 			new (&info.iActualMapAttr) TMappingAttributes2(EMemAttDevice,EFalse,ETrue,ETrue,info.iShared?ETrue:EFalse);
       
   655 			break;
       
   656 		case RCacheTestDevice::E_Normal_Uncached:
       
   657 			new (&info.iActualMapAttr) TMappingAttributes2(EMemAttNormalUncached,EFalse,ETrue,ETrue,info.iShared?ETrue:EFalse);
       
   658 			break;
       
   659 		case RCacheTestDevice::E_Normal_Cached:
       
   660 			new (&info.iActualMapAttr) TMappingAttributes2(EMemAttNormalCached,EFalse,ETrue,ETrue,info.iShared?ETrue:EFalse);
       
   661 			break;
       
   662 
       
   663 #if defined(__CPU_MEMORY_TYPE_REMAPPING)
       
   664 		case RCacheTestDevice::E_InnerWT_Remapped:
       
   665 		case RCacheTestDevice::E_InnerWBRA_Remapped:
       
   666 		case RCacheTestDevice::E_InnerWB_Remapped:
       
   667 		case RCacheTestDevice::E_InOutWT_Remapped:
       
   668 		case RCacheTestDevice::E_InOutWBRA_Remapped:
       
   669 		case RCacheTestDevice::E_InOutWB_Remapped:
       
   670 #ifdef __SMP__
       
   671 			info.iShared = ETrue;
       
   672 #endif
       
   673 		case RCacheTestDevice::E_OuterWT_Remapped:
       
   674 		case RCacheTestDevice::E_OuterWBRA_Remapped:
       
   675 		case RCacheTestDevice::E_OuterWB_Remapped:
       
   676 			Remap(info.iCacheAttr);
       
   677 			new (&info.iActualMapAttr) TMappingAttributes2(EMemAttKernelInternal4,EFalse,ETrue,ETrue,info.iShared?ETrue:EFalse);
       
   678 			break;
       
   679 #endif
       
   680 			
       
   681 		case RCacheTestDevice::E_Default:
       
   682 			{
       
   683 			// Run the test against test function from rom image (no need for extra memory chunks)
       
   684 			if (info.iSize > TestCodeFuncSize())
       
   685 				return KErrNoMemory; // TestCodeFunc is not big enough to conduct the test.
       
   686 			
       
   687 			TInt startAddr = (TInt)TestCodeFunc + TestCodeFuncSize() - info.iSize;
       
   688 			
       
   689 			// This will invalidate (aka purge) test function from L2 cache.
       
   690 			Cache::SyncMemoryAfterDmaRead((TLinAddr)startAddr, info.iSize); 
       
   691 
       
   692 			// Execute the test
       
   693 			time = NKern::TickCount();
       
   694 			LoopTestCodeFunc((CodeTest*)startAddr);
       
   695 			info.iTime = NKern::TickCount() - time;
       
   696 
       
   697 			info.iActualMapAttr = 0; //Not relevant.
       
   698 			kumemput(a1,&info,sizeof(info));
       
   699 			return KErrNone;
       
   700 			}
       
   701 		default:
       
   702 			return KErrArgument;		
       
   703 		}
       
   704 
       
    705 	// Run the test against a test function placed in a memory chunk with cache attributes as specified in info.iCacheAttr.

    706 	// Shared chunks cannot be mapped with execute permission, so a hardware chunk (DPlatChunkHw) is used instead.
       
   707 	DPlatChunkHw* chunk;
       
   708 	TPhysAddr physBase;		// This will be base physical address of the chunk
       
   709     TLinAddr linearBase;	// This will be base linear address of the chunk
       
   710 	NKern::ThreadEnterCS();
       
    711 	r = Epoc::AllocPhysicalRam(Kern::RoundToPageSize(info.iSize), physBase, 0);//Allocate physical RAM. This will set physBase
       
   712 	if (r)
       
   713 		{
       
   714 		NKern::ThreadLeaveCS();
       
   715 		return r;
       
   716 		}
       
   717 	r = DPlatChunkHw::New (chunk, physBase, Kern::RoundToPageSize(info.iSize), info.iActualMapAttr);//Create chunk
       
   718 	if (r)
       
   719 		{
       
   720 		Epoc::FreePhysicalRam(physBase, Kern::RoundToPageSize(info.iSize));
       
   721 		NKern::ThreadLeaveCS();
       
   722 		return r;
       
   723 		}
       
   724 	NKern::ThreadLeaveCS();
       
   725 
       
   726 	linearBase = chunk->LinearAddress();
       
   727 
       
   728 	// Create nop,nop,...,nop,ret sequence at the start of the chunk with size = info.iSize
       
   729 	TInt nopInstr = ((TInt*)TestCodeFunc)[0]; 						// NOP is the first instruction from TestCodeFunc
       
   730 	TInt retInstr = ((TInt*)TestCodeFunc)[TestCodeFuncSize()/4-1];	// RET is the last instruction in TestCodeFunc 	
       
   731 	for (TInt i = 0; i < (info.iSize/4-1) ; i++)  	// Put all NOPs...
       
   732 		((TInt*)linearBase)[i] = nopInstr;			// ...
       
   733 	((TInt*)linearBase)[info.iSize/4-1] = retInstr;	// ... and add RET at the end.
       
   734 
       
   735 	Cache::IMB_Range((TLinAddr)linearBase, info.iSize); 			// Sync L1 Instruction & Data cache
       
    736 	//Flush the memory from which the test function executes. This gives a fair chance to all test cases.
       
   737 	Cache::SyncMemoryBeforeDmaWrite(linearBase, info.iSize);		// This will clean L1&L2 cache.
       
   738 	Cache::SyncMemoryAfterDmaRead(linearBase, info.iSize);			// This will invalidate (aka purge) L1&L2 cache.
       
   739 
       
   740 	// Execute the test
       
   741 	time = NKern::TickCount();
       
   742 	LoopTestCodeFunc((CodeTest*)linearBase);
       
   743 	info.iTime = NKern::TickCount() - time;
       
   744 
       
   745 	kumemput(a1,&info,sizeof(info));
       
   746 
       
   747 	NKern::ThreadEnterCS();
       
   748 	chunk->Close(NULL);
       
   749 	Epoc::FreePhysicalRam(physBase, Kern::RoundToPageSize(info.iSize));
       
   750 	NKern::ThreadLeaveCS();
       
   751 	return KErrNone;
       
   752 	}
       
   753 
       
   754 /**
       
    755 Tests WriteBack mode:

    756 	(1) Writes data into WB-cached memory.

    757 	(2) Purges (invalidates) the cache.

    758 	(3) Counts the bytes that reached main memory.

    759 @param aWriteAlloc ETrue to test WriteAllocate, EFalse to test ReadAllocate.
       
   760 */
       
   761 TInt DCacheTest::TestWriteBackMode(TAny* a1, TBool aWriteAlloc)
       
   762 	{
       
   763 	TInt r, cacheAttr = EMapAttrSupRw;
       
   764 	TUint i, counter = 0;
       
   765 	const TInt pattern = 0xabcdef12;
       
   766 
       
   767 	RCacheTestDevice::TChunkTest info;
       
   768 	kumemget(&info,a1,sizeof(info));
       
   769 #ifdef __SMP__
       
   770 	TUint32 force_shared = EMapAttrShared;
       
   771 #else
       
   772 	TUint32 force_shared = 0;
       
   773 #endif
       
   774 
       
   775 	switch (info.iCacheAttr)
       
   776 		{
       
   777 #if defined(__CPU_MEMORY_TYPE_REMAPPING)
       
   778 		case RCacheTestDevice::E_InnerWBRA_Remapped:
       
   779 		case RCacheTestDevice::E_InnerWB_Remapped:
       
   780 		case RCacheTestDevice::E_OuterWBRA_Remapped:
       
   781 		case RCacheTestDevice::E_OuterWB_Remapped:
       
   782 			Remap(info.iCacheAttr);
       
   783 			new (&cacheAttr) TMappingAttributes2(EMemAttKernelInternal4,EFalse,ETrue,ETrue,force_shared);
       
   784 			break;
       
   785 #endif
       
   786 		case RCacheTestDevice::E_InnerWBRA:	cacheAttr |= EMapAttrCachedWBRA|force_shared; 	break;
       
   787 		case RCacheTestDevice::E_InnerWB:	cacheAttr |= EMapAttrCachedWBWA|force_shared; 	break;
       
   788 		case RCacheTestDevice::E_OuterWBRA:	cacheAttr |= EMapAttrL2CachedWBRA|force_shared;	break;
       
   789 		case RCacheTestDevice::E_OuterWB:	cacheAttr |= EMapAttrL2CachedWBWA|force_shared;	break;
       
   790 		default: return KErrArgument;
       
   791 		}
       
   792 	// Create chunk
       
   793 	if (KErrNone!=(r=AllocPhysicalRam(info.iSize))) return r;
       
   794 	if (KErrNone!=(r=CreateSharedChunk(cacheAttr, info.iActualMapAttr))) return r;
       
   795 	
       
    796 	for (i=0; i<(iSize>>2) ; i++) ((TInt*)iChunkBase)[i] = 0;   //Zero-fill the buffer through the cache and...
       
   797 	Cache::SyncMemoryBeforeDmaWrite(iChunkBase, iSize);			//... clean the cache down to memory
       
   798 
       
   799 	Cache::SyncMemoryAfterDmaRead(iChunkBase, iSize);			//Invalidate (aka purge).
       
   800 
       
   801 	// Fill in cached region with the pattern.
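	// For the read-allocate variant each word is read first, so the read allocates the cache line;
	// for the write-allocate variant the write itself must allocate it.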
       
   802 	for (i=0; i<(iSize>>2); i++)
       
   803 	 	{
       
   804 	 	if (!aWriteAlloc) iDummy = ((TInt*)iChunkBase)[i]; 		// Don't read if WriteAllocate is tested
       
   805 	 	((TInt*)iChunkBase)[i] = pattern;
       
   806 	 	}
       
   807 		
       
   808 	Cache::SyncMemoryAfterDmaRead(iChunkBase, iSize);	//Invalidate (aka purge) cache. Data in cache should be destroyed
       
   809 	CloseSharedChunk();									// Close cached chunk.
       
   810 	
       
   811 	//Create non-cached chunk over the same physical memory
       
   812 	if (KErrNone!=(r=CreateSharedChunk(EMapAttrSupRw , iDummy))) return r;
       
   813 
       
   814 	// Counts out how many bytes have reached RAM
       
   815 	for (i=0; i<(iSize>>2); i++) if (((TInt*)iChunkBase)[i] == pattern) counter++;
       
   816 
       
   817 	info.iSize = counter<<2; //Return the number of bytes that reached the main memory
       
   818 	CloseSharedChunk();
       
   819 	FreePhysicalRam();
       
   820 	
       
   821 	kumemput(a1,&info,sizeof(info));
       
   822 	return r;
       
   823 	}
       
   824 
       
   825 /**
       
    826 Exercises SyncMemoryBeforeDmaWrite & SyncMemoryAfterDmaRead (which call the L1/L2 cache clean & purge methods).

    827 This only checks that they do not panic; it performs no functional verification.
       
   828 */
       
   829 TInt DCacheTest::TestL2Maintenance()
       
   830 	{
       
    831 	// Create a 40000h (256KB) chunk with memory committed as:

    832 	// |0| NotCommitted |64K| Committed |128K| NotCommitted |192K| Committed |256K|
       
   833 #ifdef __SMP__
       
   834 	TUint32 force_shared = EMapAttrShared;
       
   835 #else
       
   836 	TUint32 force_shared = 0;
       
   837 #endif
       
   838 	TInt r;
       
   839 	TChunkCreateInfo info;
       
   840     info.iType         = TChunkCreateInfo::ESharedKernelSingle;
       
   841 	info.iMaxSize      = 0x40000;
       
   842 	info.iMapAttr      = EMapAttrSupRw | EMapAttrCachedWBWA | EMapAttrL2CachedWBWA | force_shared;
       
   843 	info.iOwnsMemory   = ETrue; // Use memory from system's free pool
       
   844 	info.iDestroyedDfc = NULL;
       
   845 
       
   846     TLinAddr chunkAddr;
       
   847     TUint32 mapAttr;
       
   848     DChunk* chunk;
       
   849 	TInt pageSize = 0x1000; //4K
       
   850 
       
   851 	NKern::ThreadEnterCS();
       
   852     if (KErrNone != (r = Kern::ChunkCreate(info, chunk, chunkAddr, mapAttr)))
       
   853 		{
       
   854 		NKern::ThreadLeaveCS();
       
   855 		return r;
       
   856 		}
       
   857 	r = Kern::ChunkCommit(chunk,0x10000,0x10000);
       
   858     if(r!=KErrNone)
       
   859         {
       
   860 		Kern::ChunkClose(chunk);
       
   861 		NKern::ThreadLeaveCS();
       
   862 		return r;
       
   863 		}
       
   864 	r = Kern::ChunkCommit(chunk,0x30000,0x10000);
       
   865     if(r!=KErrNone)
       
   866         {
       
   867 		Kern::ChunkClose(chunk);
       
   868 		NKern::ThreadLeaveCS();
       
   869 		return r;
       
   870 		}
       
   871 
       
   872 	NKern::ThreadLeaveCS();
       
   873 
       
   874 	TInt valid = chunkAddr+0x10000;
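	// 'valid' points into the first committed region (64KB-128KB) of the chunk.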
       
   875 
       
   876 	#if defined(__ARM_L220_CACHE__) || defined(__ARM_L210_CACHE__)
       
    877 	// Check L2 cache maintenance against invalid (uncommitted) addresses.

    878 	// On ARMv6, cleaning/purging an L1 cache region that contains invalid addresses panics.

    879 	// However, cleaning/purging a region larger than the threshold cleans/purges the entire L1 cache (which doesn't panic).

    880 	// That is why the following calls run against 256KB.

    881 	// We cannot do that on the XScale L2 cache as it would generate a page-walk data abort.
       
   882 	TInt invalid = chunkAddr;
       
   883 	Cache::SyncMemoryBeforeDmaWrite(invalid+20, 0x40000-20);
       
   884 	Cache::SyncMemoryAfterDmaRead(invalid+100,0x40000-101);
       
   885 	#endif
       
   886 	
       
   887 	
       
   888 	// The following calls operate against valid memory regions.
       
   889 	Cache::SyncMemoryAfterDmaRead(valid+1, 0);
       
   890 	Cache::SyncMemoryAfterDmaRead(valid+32, 12);
       
   891 	Cache::SyncMemoryAfterDmaRead(valid+1, 0);
       
   892 	Cache::SyncMemoryBeforeDmaWrite(valid+2, 1);
       
   893 	Cache::SyncMemoryAfterDmaRead(valid+3, 2);
       
   894 	Cache::SyncMemoryBeforeDmaWrite(valid+4, 3);
       
   895 	Cache::SyncMemoryAfterDmaRead(valid+5, 4);
       
   896 	Cache::SyncMemoryBeforeDmaWrite(valid+6, 5);
       
   897 	Cache::SyncMemoryAfterDmaRead(valid+7, 6);
       
   898 	Cache::SyncMemoryBeforeDmaWrite(valid+8, 7);
       
   899 	Cache::SyncMemoryAfterDmaRead(valid+9, 8);
       
   900 	Cache::SyncMemoryBeforeDmaWrite(valid+10, 9);
       
   901 	Cache::SyncMemoryAfterDmaRead(valid+11, 10);
       
   902 	Cache::SyncMemoryBeforeDmaWrite(valid+12, 11);
       
   903 	Cache::SyncMemoryAfterDmaRead(valid+13, 12);
       
   904 	Cache::SyncMemoryBeforeDmaWrite(valid+14, 13);
       
   905 	Cache::SyncMemoryAfterDmaRead(valid+15, 14);
       
   906 
       
   907 	TLinAddr page = (valid+2*pageSize);
       
   908 	Cache::SyncMemoryBeforeDmaWrite(page, 0);
       
   909 	Cache::SyncMemoryAfterDmaRead(page, 0);
       
   910 	Cache::SyncMemoryBeforeDmaWrite(page-1, 2);
       
   911 	Cache::SyncMemoryAfterDmaRead(page-2, 4);
       
   912 	Cache::SyncMemoryBeforeDmaWrite(page-3, 6);
       
   913 	Cache::SyncMemoryAfterDmaRead(page-4, 8);
       
   914 	Cache::SyncMemoryBeforeDmaWrite(page-5, 10);
       
   915 	Cache::SyncMemoryAfterDmaRead(page-6, 12);
       
   916 
       
   917 	Cache::SyncMemoryBeforeDmaWrite(page, 2*pageSize);
       
   918 	Cache::SyncMemoryAfterDmaRead(page-1, 2*pageSize);
       
   919 	Cache::SyncMemoryBeforeDmaWrite(page+1, 2*pageSize);
       
   920 	Cache::SyncMemoryAfterDmaRead(page+3, 2*pageSize);
       
   921 	Cache::SyncMemoryBeforeDmaWrite(page-3, 2*pageSize);
       
   922 
       
   923 	Cache::SyncMemoryBeforeDmaWrite(valid, 64, EMapAttrCachedMax);
       
   924 	Cache::SyncMemoryBeforeDmaRead(valid, 64, EMapAttrCachedMax);
       
   925 	Cache::SyncMemoryAfterDmaRead(valid, 64, EMapAttrCachedMax);
       
   926 
       
   927 	
       
   928 	Cache::IMB_Range(0, 0xffffffff);//will cause: Clean all DCache & Purge all ICache
       
   929 	// Close the chunk
       
   930 	NKern::ThreadEnterCS();
       
   931 	Kern::ChunkClose(chunk);
       
   932 	NKern::ThreadLeaveCS();
       
   933 
       
   934 
       
    935 	//Check maintenance functions against the entire cache (requires a memory region >= 8 * cache size)
       
   936     info.iType         = TChunkCreateInfo::ESharedKernelSingle;
       
   937 	info.iMaxSize      = 0x100000; //1MB will do
       
   938 	info.iMapAttr      = EMapAttrSupRw | EMapAttrCachedWBWA | EMapAttrL2CachedWBWA | force_shared;
       
   939 	info.iOwnsMemory   = ETrue; // Use memory from system's free pool
       
   940 	info.iDestroyedDfc = NULL;
       
   941 
       
   942 	NKern::ThreadEnterCS();
       
   943     if (KErrNone != (r = Kern::ChunkCreate(info, chunk, chunkAddr, mapAttr)))
       
   944 		{
       
   945 		NKern::ThreadLeaveCS();
       
   946 		return r;
       
   947 		}
       
   948 	r = Kern::ChunkCommit(chunk,0x0,0x100000);
       
   949     if(r!=KErrNone)
       
   950         {
       
   951 		Kern::ChunkClose(chunk);
       
   952 		NKern::ThreadLeaveCS();
       
   953 		return r;
       
   954 		}
       
   955 	NKern::ThreadLeaveCS();
       
   956 
       
   957 	Cache::SyncMemoryBeforeDmaWrite(chunkAddr, 0x100000);
       
   958 	Cache::SyncMemoryAfterDmaRead(chunkAddr, 0x100000);
       
   959 
       
   960 	// Close the chunk
       
   961 	NKern::ThreadEnterCS();
       
   962 	Kern::ChunkClose(chunk);
       
   963 	NKern::ThreadLeaveCS();
       
   964 
       
   965 	return KErrNone;
       
   966 	}
       
   967 
       
   968 
       
   969 TInt DCacheTest::TestUseCase(TAny* a1)
       
   970 	{
       
   971 	TInt r = KErrNone;
       
   972 	TInt time;
       
   973 	
       
   974 	RCacheTestDevice::TChunkTest info;
       
   975 	kumemget(&info,a1,sizeof(info));
       
   976 
       
   977 	TUint32 chunkAttr = EMapAttrSupRw;
       
   978 #ifdef __SMP__
       
   979 	TUint32 force_shared = EMapAttrShared;
       
   980 #else
       
   981 	TUint32 force_shared = 0;
       
   982 #endif
       
   983 	if (info.iShared) chunkAttr |= EMapAttrShared;
       
   984 
       
   985 	switch (info.iCacheAttr)
       
   986 		{
       
   987 		case RCacheTestDevice::E_StronglyOrder:
       
   988 			new (&chunkAttr) TMappingAttributes2(EMemAttStronglyOrdered,EFalse,ETrue,EFalse,info.iShared?ETrue:EFalse);
       
   989 			break;
       
   990 		case RCacheTestDevice::E_Device:
       
   991 			new (&chunkAttr) TMappingAttributes2(EMemAttDevice,EFalse,ETrue,EFalse,info.iShared?ETrue:EFalse);
       
   992 			break;
       
   993 		case RCacheTestDevice::E_Normal_Uncached:
       
   994 			new (&chunkAttr) TMappingAttributes2(EMemAttNormalUncached,EFalse,ETrue,EFalse,info.iShared?ETrue:EFalse);
       
   995 			break;
       
   996 		case RCacheTestDevice::E_Normal_Cached:
       
   997 			new (&chunkAttr) TMappingAttributes2(EMemAttNormalCached,EFalse,ETrue,EFalse,info.iShared?ETrue:EFalse);
       
   998 			break;
       
   999 		#if defined(__CPU_MEMORY_TYPE_REMAPPING)
       
  1000 		case RCacheTestDevice::E_InOutWT_Remapped:
       
  1001 			Remap(info.iCacheAttr);
       
  1002 			new (&chunkAttr) TMappingAttributes2(EMemAttKernelInternal4,EFalse,ETrue,EFalse,(info.iShared|force_shared)?ETrue:EFalse);
       
  1003 		#else
       
  1004 		case RCacheTestDevice::E_InOutWT:		chunkAttr |= EMapAttrCachedWTRA|EMapAttrL2CachedWTRA|force_shared;
       
  1005 		#endif
       
  1006 			break;
       
  1007 		default:
       
  1008 			return KErrArgument;		
       
  1009 		}
       
  1010 
       
  1011 	// Create chunk
       
  1012 	if (KErrNone!=(r=AllocPhysicalRam(Kern::RoundToPageSize(info.iSize)))) return r;
       
  1013 	if (KErrNone!=(r=CreateSharedChunk(chunkAttr, info.iActualMapAttr))) return r;
       
  1014 	
       
  1015 	//Alloc from the heap
       
  1016 	NKern::ThreadEnterCS();
       
  1017 	iHeap1 = (TInt*)Kern::Alloc(Max(info.iSize,0x8000));
       
  1018 	if (iHeap1==NULL) {NKern::ThreadLeaveCS();return KErrNoMemory;}
       
  1019 	iHeap2 = (TInt*)Kern::Alloc(0x8000);
       
  1020 	if (iHeap2==NULL) {Kern::Free((TAny*)iHeap1);NKern::ThreadLeaveCS();return KErrNoMemory;}
       
  1021 	NKern::ThreadLeaveCS();
       
  1022 	
       
  1023 	Cache::SyncMemoryAfterDmaRead(iChunkBase, info.iSize); // Invalidate (aka purge) cache.
       
  1024 	time = NKern::TickCount();
       
  1025 	switch(info.iUseCase)
       
  1026 		{
       
  1027 		case 0:  r = UseCase_ReadFromChunk(info);break;
       
  1028 		case 1:  r = UseCase_ReadFromChunk_ReadFromHeap(info);break;
       
  1029 		case 2:  r = UseCase_WriteToChunk(info);break;
       
  1030 		case 3:  r = UseCase_WriteToChunk_ReadFromHeap(info);break;
       
  1031 		default: r = KErrArgument;
       
  1032 		}
       
  1033 	info.iTime = NKern::TickCount() - time;
       
  1034 
       
  1035 	NKern::ThreadEnterCS();
       
  1036 	Kern::Free((TAny*)iHeap1);
       
  1037 	Kern::Free((TAny*)iHeap2);
       
  1038 	NKern::ThreadLeaveCS();
       
  1039 	
       
  1040 	CloseSharedChunk();
       
  1041 	FreePhysicalRam();
       
  1042 
       
  1043 	kumemput(a1,&info,sizeof(info));
       
  1044 	return r;
       
  1045 	}
       
  1046 
       
  1047 TInt DCacheTest::UseCase_ReadFromChunk(RCacheTestDevice::TChunkTest& info)
       
  1048 	{
       
  1049 	TInt i;
       
  1050 	for (i=0; i< info.iLoops; i++)
       
  1051 		{
       
   1052 		//Simulate - evict the chunk memory from the cache
       
  1053 		Cache::SyncMemoryBeforeDmaRead(iChunkBase, info.iSize, info.iActualMapAttr); // Invalidate (aka purge) cache.
       
  1054 
       
  1055 		//Read DMA data
       
  1056 		memcpy((TAny*)iHeap1, (const TAny*)iChunkBase, info.iSize);
       
  1057 		//for (j=0; j < info.iSize>>2; j++) iDummy = *((TInt*)iChunkBase+j);
       
  1058 		}
       
  1059 	return KErrNone;
       
  1060 	}
       
  1061 
       
  1062 TInt DCacheTest::UseCase_ReadFromChunk_ReadFromHeap(RCacheTestDevice::TChunkTest& info)
       
  1063 	{
       
  1064 	TInt i;
       
  1065 	for (i=0; i< info.iLoops; i++)
       
  1066 		{
       
  1067 		//Simulate - evict the chunk memory from the cache
       
  1068 		Cache::SyncMemoryBeforeDmaRead(iChunkBase, info.iSize, info.iActualMapAttr); // Invalidate (aka purge) cache.
       
  1069 
       
  1070 		//Read DMA memory
       
  1071 		memcpy((TAny*)iHeap1, (const TAny*)iChunkBase, info.iSize);
       
  1072 
       
  1073 		//Simulate Kernel activities - reading heap2
       
  1074 		memcpy((TAny*)iHeap1, (const TAny*)iHeap2, 0x8000);
       
  1075 		}
       
  1076 	return KErrNone;
       
  1077 	}
       
  1078 
       
  1079 TInt DCacheTest::UseCase_WriteToChunk(RCacheTestDevice::TChunkTest& info)
       
  1080 	{
       
  1081 	TInt i;
       
  1082 	for (i=0; i< info.iLoops; i++)
       
  1083 		{
       
  1084 		//Simulate - evict the chunk memory from the cache
       
  1085 		Cache::SyncMemoryBeforeDmaRead(iChunkBase, info.iSize, info.iActualMapAttr); // Invalidate (aka purge) cache.
       
  1086 
       
  1087 		//Write DMA memory
       
  1088 		memcpy((TAny*)iChunkBase, (const TAny*)iHeap1, info.iSize);
       
  1089 		Cache::SyncMemoryBeforeDmaWrite(iChunkBase, info.iSize, info.iActualMapAttr); // Clean cache.
       
  1090 
       
  1091 		}
       
  1092 	return KErrNone;
       
  1093 	}
       
  1094 
       
  1095 TInt DCacheTest::UseCase_WriteToChunk_ReadFromHeap(RCacheTestDevice::TChunkTest& info)
       
  1096 	{
       
  1097 	TInt i;
       
  1098 	for (i=0; i< info.iLoops; i++)
       
  1099 		{
       
  1100 		//Simulate - evict the chunk memory from the cache
       
  1101 		Cache::SyncMemoryBeforeDmaRead(iChunkBase, info.iSize, info.iActualMapAttr); // Invalidate (aka purge) cache.
       
  1102 
       
  1103 		//Write DMA memory
       
  1104 		memcpy((TAny*)iChunkBase, (const TAny*)iHeap1, info.iSize);
       
  1105 		Cache::SyncMemoryBeforeDmaWrite(iChunkBase, info.iSize, info.iActualMapAttr); // Clean cache.
       
  1106 		
       
  1107 		//Simulate Kernel activities - reading heap2
       
  1108 		memcpy((TAny*)iHeap1, (const TAny*)iHeap2, 0x8000);
       
  1109 		}
       
  1110 	return KErrNone;
       
  1111 	}
       
  1112 
       
  1113 
       
  1114 // Entry point
       
  1115 TInt DCacheTest::Request(TInt aFunction, TAny* a1, TAny* a2)
       
  1116 	{
       
  1117 	TInt r = KErrNone;
       
  1118 #ifdef __SMP__
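	// Pin the request to CPU 0 so cache contents and tick-count timings are not skewed by
	// the thread migrating between cores; the original affinity is restored below.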
       
  1119 	TUint32 affinity = NKern::ThreadSetCpuAffinity(NKern::CurrentThread(), 0);
       
  1120 #endif
       
  1121 	switch (aFunction)
       
  1122 	{
       
  1123 		case RCacheTestDevice::EGetCacheInfo:				r = GetCacheInfo(a1);		break;
       
  1124 		case RCacheTestDevice::ETestDataChunk:				r = TestDataChunk(a1);		break;
       
  1125 		case RCacheTestDevice::ETestCodeChunk:				r = TestCodeChunk(a1);		break;
       
  1126 		case RCacheTestDevice::ETestWriteBackReadAllocate:	r = TestWriteBackMode(a1, EFalse);	break;
       
  1127 		case RCacheTestDevice::ETestWriteBackWriteAllocate:	r = TestWriteBackMode(a1, ETrue);	break;
       
  1128 		case RCacheTestDevice::ETesL2Maintenance:			r = TestL2Maintenance();	break;
       
  1129 		case RCacheTestDevice::EGetThreshold:				r = GetThreshold(a1);		break;
       
  1130 		case RCacheTestDevice::ESetThreshold:				r = SetThreshold(a1);		break;
       
  1131 		case RCacheTestDevice::ETestUseCase:				r = TestUseCase(a1);		break;
       
  1132 		default:											r=KErrNotSupported;
       
  1133 		}
       
  1134 #ifdef __SMP__
       
  1135 	NKern::ThreadSetCpuAffinity(NKern::CurrentThread(), affinity);
       
  1136 #endif
       
  1137 	return r;
       
  1138 	}
       
  1139 
       
  1140 //////////////////////////////////////////
       
  1141 class DTestFactory : public DLogicalDevice
       
  1142 	{
       
  1143 public:
       
  1144 	DTestFactory();
       
  1145 	// from DLogicalDevice
       
  1146 	virtual TInt Install();
       
  1147 	virtual void GetCaps(TDes8& aDes) const;
       
  1148 	virtual TInt Create(DLogicalChannelBase*& aChannel);
       
  1149 	};
       
  1150 
       
  1151 DTestFactory::DTestFactory()
       
  1152     {
       
  1153     iParseMask = KDeviceAllowUnit;
       
  1154     iUnitsMask = 0x3;
       
  1155     }
       
  1156 
       
  1157 TInt DTestFactory::Create(DLogicalChannelBase*& aChannel)
       
  1158     {
       
  1159 	CacheTestDriver = new DCacheTest;
       
  1160 	aChannel = CacheTestDriver;
       
  1161 	return (aChannel ? KErrNone : KErrNoMemory);
       
  1162     }
       
  1163 
       
  1164 TInt DTestFactory::Install()
       
  1165     {
       
  1166     return SetName(&KCacheTestDriverName);
       
  1167     }
       
  1168 
       
  1169 void DTestFactory::GetCaps(TDes8& /*aDes*/) const
       
  1170     {
       
  1171     }
       
  1172 
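// Standard LDD entry point: returns the factory object when the driver DLL is loaded.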
       
  1173 DECLARE_STANDARD_LDD()
       
  1174 	{
       
  1175     return new DTestFactory;
       
  1176 	}