// Copyright (c) 2010 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
	__DATA_MEMORY_BARRIER_Z__(r12);
	asm("1: ");
	LDREX(3,1);						// r3 = iIdlingCpus
	asm("orr r3,r0,r3");			// orr in mask for this CPU
	asm("cmp r3,r2");				// compare to iAllEngagedCpusMask
	asm("orreq r3,r3,#%a0" : : "i" ((TInt)TIdleSupport::KGlobalIdleFlag));	// if equal, orr in KGlobalIdleFlag
	STREX(12,3,1);
	asm("cmp r12, #0 ");			// did the exclusive store succeed?
	asm("bne 1b ");					// write didn't succeed, try again
	__DATA_MEMORY_BARRIER__(r12);
	asm("and r0,r3,#%a0" : : "i" ((TInt)TIdleSupport::KGlobalIdleFlag));	// return KGlobalIdleFlag if global idle was reached, 0 otherwise
	__JUMP(,lr);
	asm("__iAllEngagedCpusMask:");
	asm(".word %a0" : : "i" ((TInt)&TIdleSupport::iAllEngagedCpusMask));
	}

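// A minimal C++ sketch (illustrative only, not part of the original source) of what
// the LDREX/STREX loop above implements: atomically OR this CPU's bit into iIdlingCpus
// and, if every engaged CPU is now idling, OR in KGlobalIdleFlag as well. The function
// name and parameters are hypothetical, and GCC's __sync_val_compare_and_swap stands
// in for the exclusive-access loop.
static inline TUint32 EnterIdleSketch(TUint32 aCpuMask, volatile TUint32* aIdlingCpus, TUint32 aAllEngagedCpusMask)
	{
	for (;;)
		{
		TUint32 oldMask = *aIdlingCpus;								// LDREX
		TUint32 newMask = oldMask | aCpuMask;						// orr in mask for this CPU
		if (newMask == aAllEngagedCpusMask)
			newMask |= TIdleSupport::KGlobalIdleFlag;				// all engaged CPUs are now idling
		if (__sync_val_compare_and_swap(aIdlingCpus, oldMask, newMask) == oldMask)	// STREX
			return newMask & TIdleSupport::KGlobalIdleFlag;			// non-zero iff this CPU completed global idle
		}
	}
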
__NAKED__ void TSyncPoint::DoSW(TUint32 /*aCpuMask*/)
	{
	asm("stmfd sp!, {r4-r5,lr} ");
	asm("add r0,r0,#%a0" : : "i" _FOFF(TSyncPointBase, iStageAndCPUWaitingMask));	// skip vtable pointer
	asm("ldr r4,[r0,#4]");			// r4 = &iAllEngagedCpusMask
	asm("ldr r4,[r4]");				// r4 = iAllEngagedCpusMask
	__DATA_MEMORY_BARRIER_Z__(r12);
	asm("1: ");
	LDREX(2,0);						// r2 = iStageAndCPUWaitingMask, r4 = iAllEngagedCpusMask
	asm("mov r5,r2,lsr #16");		// r5 has the old stage value
	asm("and r2,r2,r4");			// r2 has the currently waiting cpus
	asm("cmp r2,r4");				// if r2 == r4 then all cpus have reached the sync point
	ARM_SEVcc(CC_EQ);
#endif
	asm("2: ");
	asm("cmp r3,r5");				// has the stage changed, i.e. have all cpus crossed?
	asm("ldmnefd sp!, {r4-r5,pc}"); // yup, return
#ifdef SYNCPOINT_WFE
	__DATA_MEMORY_BARRIER__(r12);
	ARM_WFE;
#endif
	asm("ldr r2,[r0]");				// otherwise re-read iStageAndCPUWaitingMask into r2
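// Illustrative only: a hypothetical C++ sketch of the sync-point protocol that
// TSyncPoint::DoSW implements. Each CPU ORs its bit into the low half-word of
// iStageAndCPUWaitingMask; the CPU whose bit completes the set of engaged CPUs
// advances the stage count held in the high half-word, and every CPU then waits
// until the stage it sampled on entry has changed. WFE/SEV is modelled as a spin,
// and __sync_val_compare_and_swap again stands in for LDREX/STREX.
static inline void SyncPointSketch(TUint32 aCpuMask, volatile TUint32* aStageAndMask, TUint32 aAllEngagedCpusMask)
	{
	TUint32 oldStage;
	for (;;)
		{
		TUint32 old = *aStageAndMask;								// LDREX
		oldStage = old >> 16;										// stage on entry
		TUint32 waiting = (old & aAllEngagedCpusMask) | aCpuMask;	// mark this CPU as waiting
		TUint32 stage = oldStage;
		if (waiting == aAllEngagedCpusMask)
			++stage;												// last CPU in: advance the stage
		if (__sync_val_compare_and_swap(aStageAndMask, old, waiting | (stage << 16)) == old)	// STREX
			break;
		}
	while ((*aStageAndMask >> 16) == oldStage)
		{ /* spin; the real code uses WFE here and SEV on the last CPU */ }
	}
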
__NAKED__ void TBreakableSyncPoint::DoSW(TUint32 /*aCpuMask*/)
	{
	asm("stmfd sp!, {r4,lr} ");
	asm("add r0,r0,#%a0" : : "i" _FOFF(TSyncPointBase, iStageAndCPUWaitingMask));	// skip vtable pointer
	asm("ldr r4,[r0,#4]");			// r4 = &iAllEngagedCpusMask
	asm("ldr r4,[r4]");				// r4 = iAllEngagedCpusMask
	__DATA_MEMORY_BARRIER_Z__(r12);
	asm("1: ");
	LDREX(2,0);						// r2 = iStageAndCPUWaitingMask, r4 = iAllEngagedCpusMask
	asm("ands r3,r2,#0x80000000");	// MSB (broken flag) set?
	asm("bne 3f");					// sync point broken, so return
	asm("ands r3,r2,#0x80000000");	// MSB set?
	ARM_SEVcc(CC_NE);
#endif
	asm("2: ");
	asm("ands r3,r2,#0x80000000");	// MSB set?
	asm("ldmnefd sp!, {r4,pc}");	// yup, return
#ifdef SYNCPOINT_WFE
	__DATA_MEMORY_BARRIER__(r12);
	ARM_WFE;
#endif
	asm("ldr r2,[r0]");				// otherwise re-read iStageAndCPUWaitingMask into r2
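// Illustrative only: a hypothetical C++ sketch of the "breakable" variant above. Bit 31
// of iStageAndCPUWaitingMask acts as a release flag: the visible code returns as soon as
// it is set, whether that happens because the sync point was broken or (in the elided
// update, by inference) because the last engaged CPU arrived. KSyncReleasedFlag is a
// made-up name for that bit.
const TUint32 KSyncReleasedFlag = 0x80000000u;

static inline void BreakableSyncPointSketch(TUint32 aCpuMask, volatile TUint32* aStageAndMask, TUint32 aAllEngagedCpusMask)
	{
	for (;;)
		{
		TUint32 old = *aStageAndMask;								// LDREX
		if (old & KSyncReleasedFlag)
			return;													// sync point broken/crossed: return at once
		TUint32 newWord = old | aCpuMask;							// mark this CPU as waiting
		if ((newWord & aAllEngagedCpusMask) == aAllEngagedCpusMask)
			newWord |= KSyncReleasedFlag;							// last CPU in: release everyone
		if (__sync_val_compare_and_swap(aStageAndMask, old, newWord) == old)	// STREX
			break;
		}
	while (!(*aStageAndMask & KSyncReleasedFlag))
		{ /* spin; the real code uses WFE here and SEV on release */ }
	}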