kernel/eka/klib/arm/cbma.cia
changeset 201 43365a9b78a3
parent 0 a41df078684a
	asm("0: ");
	ASM_FAULT();
	}


/**	Allocates a specific range of bit positions.

	The specified range must lie within the total range for this allocator but it is
	not necessary that all the positions are currently free.

	@param	aStart	First position to allocate.
	@param	aLength	Number of consecutive positions to allocate, must be >0.
	@return The number of previously free positions that were allocated.
 */
EXPORT_C __NAKED__ TUint TBitMapAllocator::SelectiveAlloc(TInt /*aStart*/, TInt /*aLength*/)
	{
	asm("ldr r3, [r0, #%a0] " : : "i" _FOFF(TBitMapAllocator, iSize));	// r3 = iSize
	asm("stmfd sp!, {r4-r8,lr} ");
	asm("adds r4, r1, r2 ");			// r4 = aStart + aLength
	asm("bcs 0f ");						// fault if the addition overflowed
	asm("cmp r4, r3 ");					// if (aStart + aLength > iSize)
	asm("bhi 0f ");						// fault if the range exceeds iSize
	asm("mov r7, r0 ");					// r7 = this
	asm("mov r4, r1, lsr #5 ");			// r4 = wix = aStart >> 5
	asm("and r1, r1, #0x1f ");			// r1 = sbit = aStart & 31
	asm("ldr r6, [r7, #%a0] " : : "i" _FOFF(TBitMapAllocator, iAvail));	// r6 = iAvail
	asm("add r4, r7, r4, lsl #2 ");		// r4 = this + wix*4
	asm("add r4, r4, #%a0 " : : "i" _FOFF(TBitMapAllocator, iMap));		// r4 = pW = &iMap[wix]
	asm("sub r6, r6, r2 ");				// r6 = iAvail - aLength
	asm("add r5, r2, r1 ");				// r5 = ebit = sbit + aLength
	asm("mvn r0, #0 ");
	asm("mvn r0, r0, lsr r1 ");			// r0 = b = ~(0xffffffff >> sbit)
	asm("cmp r5, #32 ");
	asm("mov r8, r2");					// r8 = aLength
	asm("bhi salloc_cross_bdry ");		// branch if (ebit > 32), i.e. the range spans more than one word

	asm("mvn r5, #0 ");					// r5 = 0xffffffff
	asm("mov r5, r5, lsr r8 ");			// r5 = 0xffffffff >> aLength
	asm("mov r5, r5, lsr r1 ");			// r5 >>= sbit
	asm("orr r5, r5, r0 ");				// r5 = b, zero bits mark the positions to allocate
	asm("ldr r0, [r4] ");				// r0 = w = *pW
	asm("and r1, r0, r5 ");				// r1 = w & b, clear the positions to be allocated
	asm("str r1, [r4] ");				// *pW = r1, store new bitmap word
	asm("mvn r0, r0");					// r0 = ~w
	asm("mvn r5, r5");					// r5 = ~b
	asm("and r0, r0, r5");				// r0 = ~w & ~b, positions in the range already allocated
	asm("bl " CSM_CFUNC(__e32_bit_count_32));	// r0 = count of already-allocated positions
	asm("add r6, r6, r0 ");				// r6 = iAvail - newly allocated = new free count
	asm("sub r0, r8, r0 ");				// return aLength - alreadyAllocated = newly allocated count
	asm("str r6, [r7] ");				// store updated iAvail (free count)
	asm("ldmfd sp!, {r4-r8,pc} ");		// return

	asm("salloc_cross_bdry: ");			// loop: r0 = b, r8 = aLength (less already-allocated bits so far), r7 = this, r5 = ebit
	asm("ldr r2, [r4] ");				// r2 = w = *pW
	asm("and r1, r2, r0 ");				// r1 = w & b, clear the positions to allocate in this word
	asm("str r1, [r4], #4 ");			// *pW++ = r1, store new bitmap word
	asm("mvn r2, r2");					// r2 = ~w
	asm("mvn r0, r0");					// r0 = ~b
	asm("and r0, r0, r2");				// r0 = ~w & ~b, positions in this word already allocated
	asm("bl " CSM_CFUNC(__e32_bit_count_32));	// r0 = count of already-allocated positions
	asm("add r6, r6, r0 ");				// r6 += alreadyAllocated, adjust free count
	asm("sub r8, r8, r0 ");				// r8 -= alreadyAllocated, r8 ends as the newly allocated count
	asm("subs r5, r5, #32 ");			// r5 = ebit -= 32
	asm("bls salloc_return ");			// if (ebit <= 0) the whole range is done, return
	asm("cmp r5, #32 ");				// if (ebit < 32) {
	asm("mvnlt r0, #0 ");
	asm("movlt r0, r0, lsr r5 ");		// r0 = b = 0xffffffff >> ebit }
	asm("movge r0, #0 ");				// if (ebit >= 32) r0 = b = 0, allocate the whole next word
	asm("b salloc_cross_bdry ");

	asm("salloc_return: ");
	asm("str r6, [r7] ");				// store updated iAvail (free count)
	asm("mov r0, r8 ");					// return the number of previously free positions allocated
	asm("ldmfd sp!, {r4-r8,pc} ");		// return

	asm("0: ");
	ASM_FAULT();
	}
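For readers less familiar with ARM assembler, the following is a minimal, standalone C++ sketch of the behaviour the routine above implements. It assumes the usual TBitMapAllocator bit layout (one bit per position, MSB first within each 32-bit word, a set bit meaning the position is free); the helper name and parameters are illustrative only and not part of the real class, and it walks the range one bit at a time rather than one word at a time.

// Standalone sketch only - not the kernel implementation.  aMap points to the
// bitmap words; aAvail is the running free count (iAvail in the real class).
static unsigned SelectiveAllocSketch(unsigned* aMap, int& aAvail, int aStart, int aLength)
	{
	unsigned numAllocated = 0;						// previously free positions claimed
	for (int pos = aStart; pos < aStart + aLength; ++pos)
		{
		unsigned& w = aMap[pos >> 5];				// word holding this position
		unsigned mask = 0x80000000u >> (pos & 31);	// MSB-first bit for this position
		if (w & mask)								// currently free?
			{
			w &= ~mask;								// mark it allocated
			++numAllocated;
			}
		}
	aAvail -= numAllocated;							// free count drops by what was claimed
	return numAllocated;							// == aLength - already-allocated bits
	}

The assembly reaches the same result a word at a time: the mask b has zero bits at the positions being allocated, so w & b clears them in a single store, and __e32_bit_count_32 applied to ~w & ~b counts how many of those positions were already allocated, letting iAvail and the return value be adjusted without touching individual bits.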
       
/**	Tests if a specific range of bit positions are all free
	Specified range must lie within the total range for this allocator.

	@param	aStart	First position to check
	@param	aLength	Number of consecutive positions to check, must be >0