    __DATA_MEMORY_BARRIER_Z__(r12);
    asm("1: ");
    LDREX(3,1);                             // r3 = iIdlingCpus
    asm("orr r3,r0,r3");                    // OR in the mask for this CPU
    asm("cmp r3,r2");                       // compare to iAllEngagedCpusMask
    asm("orreq r3,r3,#%a0" : : "i" ((TUint32)TIdleSupport::KGlobalIdleFlag)); // if equal, OR in KGlobalIdleFlag
    STREX(12,3,1);
    asm("cmp r12, #0 ");                    // did the exclusive store succeed?
    asm("bne 1b ");                         // write didn't succeed, try again
    __DATA_MEMORY_BARRIER__(r12);
    asm("and r0,r3,#%a0" : : "i" ((TUint32)TIdleSupport::KGlobalIdleFlag));
    __JUMP(,lr);
    asm("__iAllEngagedCpusMask:");
    asm(".word %a0" : : "i" ((TInt)&TIdleSupport::iAllEngagedCpusMask));
    }

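For readers who don't think in LDREX/STREX, here is a minimal C++ sketch of what the retry loop above computes, written with std::atomic instead of the kernel's primitives. It is illustrative only: the function name is invented, and KGlobalIdleFlag being the most significant bit is an assumption, not taken from the excerpt.

#include <atomic>
#include <cstdint>

constexpr uint32_t KGlobalIdleFlag = 0x80000000u;   // assumed value, for illustration

// Atomically mark this CPU as idling; if every engaged CPU is now idling,
// also set the global idle flag. Returns that flag bit, i.e. non-zero only
// for the last CPU to go idle.
uint32_t MarkCpuIdle(std::atomic<uint32_t>& idlingCpus, uint32_t cpuMask,
                     uint32_t allEngagedCpusMask)
    {
    uint32_t oldVal = idlingCpus.load(std::memory_order_relaxed);
    uint32_t newVal;
    do  {
        newVal = oldVal | cpuMask;                  // orr r3,r0,r3
        if (newVal == allEngagedCpusMask)           // cmp r3,r2
            newVal |= KGlobalIdleFlag;              // orreq r3,r3,#KGlobalIdleFlag
        }
    while (!idlingCpus.compare_exchange_weak(oldVal, newVal,
                                             std::memory_order_acq_rel)); // STREX retry
    return newVal & KGlobalIdleFlag;                // and r0,r3,#KGlobalIdleFlag
    }
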
__NAKED__ void TSyncPoint::DoSW(TUint32 /*aCpuMask*/)
    {
    asm("stmfd sp!, {r4-r5,lr} ");
    asm("add r0,r0,#%a0" : : "i" _FOFF(TSyncPointBase, iStageAndCPUWaitingMask)); // skip vtable ptr
    asm("ldr r4,[r0,#4]");                  // r4 = pointer to iAllEngagedCpusMask
    asm("ldr r4,[r4]");                     // r4 = iAllEngagedCpusMask
    __DATA_MEMORY_BARRIER_Z__(r12);
    asm("1: ");
    LDREX(2,0);                             // r2 = iStageAndCPUWaitingMask, r4 = iAllEngagedCpusMask
    asm("mov r5,r2,lsr #16");               // r5 = old stage value
    asm("and r2,r2,r4");                    // r2 = currently waiting CPUs
    // ...
    asm("cmp r2,r4");                       // if r2 == r4 then all CPUs have set their bit
    ARM_SEVcc(CC_EQ);
#endif
    asm("2: ");
    asm("cmp r3,r5");                       // has the stage changed? (old stage != new stage)
    asm("bne 3f");                          // yes, return
#ifdef SYNCPOINT_WFE
    __DATA_MEMORY_BARRIER__(r12);
    ARM_WFE;
#endif
    asm("ldr r2,[r0]");                     // otherwise re-read iStageAndCPUWaitingMask into r2
    __DATA_MEMORY_BARRIER__(r12);           // ensure the read is observed
    asm("mov r3,r2,lsr #16");               // r3 = new stage
    asm("b 2b");                            // loop back
    asm("3: ");
    asm("ldmfd sp!, {r4-r5,pc}");           // return
    }

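Before the doc comment below, a reading aid: a hedged C++ sketch of the protocol DoSW implements. iStageAndCPUWaitingMask packs a stage counter in its high half-word and the waiting-CPU mask in its low half-word; each arriving CPU records the current stage and waits for it to change. The branch taken by the last CPU to arrive (advance the stage, clear the waiters) is not visible in the excerpt above, so that part is an assumption.

#include <atomic>
#include <cstdint>

void SyncPointWait(std::atomic<uint32_t>& stageAndWaitingMask,
                   uint32_t cpuMask, uint32_t allEngagedCpusMask)
    {
    uint32_t oldVal = stageAndWaitingMask.load(std::memory_order_relaxed);
    uint32_t oldStage = 0, newVal;
    do  {
        oldStage = oldVal >> 16;                    // mov r5,r2,lsr #16
        uint32_t waiting = (oldVal & allEngagedCpusMask) | cpuMask;
        if (waiting == allEngagedCpusMask)          // last CPU to arrive:
            newVal = (oldStage + 1) << 16;          // advance stage, clear waiters (assumed)
        else
            newVal = (oldStage << 16) | waiting;
        }
    while (!stageAndWaitingMask.compare_exchange_weak(oldVal, newVal,
                                                      std::memory_order_acq_rel));
    while ((stageAndWaitingMask.load(std::memory_order_acquire) >> 16) == oldStage)
        {
        // spin: the "2:" loop above, where SYNCPOINT_WFE builds sleep in WFE
        // and are woken by the SEV issued when the stage advances
        }
    }
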
/**

Wait for all CPUs to reach the sync point. A CPU will only exit this function when all other CPUs
...
*/

__NAKED__ void TBreakableSyncPoint::DoSW(TUint32 /*aCpuMask*/)
    {
    asm("stmfd sp!, {r4,lr} ");
    asm("add r0,r0,#%a0" : : "i" _FOFF(TSyncPointBase, iStageAndCPUWaitingMask)); // skip vtable ptr
    asm("ldr r4,[r0,#4]");                  // r4 = pointer to iAllEngagedCpusMask
    asm("ldr r4,[r4]");                     // r4 = iAllEngagedCpusMask
    __DATA_MEMORY_BARRIER_Z__(r12);
    asm("1: ");
    LDREX(2,0);                             // r2 = iStageAndCPUWaitingMask, r4 = iAllEngagedCpusMask
    asm("ands r3,r2,#0x80000000");          // MSB (broken flag) set?
    asm("bne 3f");                          // sync point broken, so return
    // ...
    asm("ands r3,r2,#0x80000000");          // MSB set?
    ARM_SEVcc(CC_NE);
#endif
    asm("2: ");
    asm("ands r3,r2,#0x80000000");          // MSB set?
    asm("bne 4f");                          // yes, return
#ifdef SYNCPOINT_WFE
    __DATA_MEMORY_BARRIER__(r12);
    ARM_WFE;
#endif
    asm("ldr r2,[r0]");                     // otherwise re-read iStageAndCPUWaitingMask into r2
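Bit 31 of iStageAndCPUWaitingMask is the "broken" flag: a CPU abandons the rendezvous as soon as it observes it, both on entry ("bne 3f" above) and on every wakeup (the "2:" loop). A one-line C++ sketch of that test, assuming only the MSB encoding that the 0x80000000 immediates show:

#include <atomic>
#include <cstdint>

constexpr uint32_t KSyncPointBrokenFlag = 0x80000000u;  // MSB, per the immediates above

// Has this breakable sync point been broken?
bool SyncPointBroken(const std::atomic<uint32_t>& stageAndWaitingMask)
    {
    return (stageAndWaitingMask.load(std::memory_order_acquire) & KSyncPointBrokenFlag) != 0;
    }
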
// ...

    {
    return 0;
    }
#endif

__NAKED__ TUint32 TIdleSupport::IntPending()
    {
    asm("ldr r1,__KCPUIFAddr");             // r1 = address of iBaseIntIfAddress
    asm("ldr r1, [r1]");                    // r1 = hardware GIC CPU interface base address
    asm("ldr r0, [r1, #%a0]" : : "i" _FOFF(GicCpuIfc, iHighestPending));
    __JUMP(,lr);
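In C++ terms the routine is a single volatile load: dereference the stored interface pointer, then read the highest-pending-interrupt register. A rough equivalent, with a deliberately minimal (assumed) view of the GicCpuIfc layout:

#include <cstdint>

struct GicCpuIfc
    {
    // only the field read here; the real layout has other registers before it
    volatile uint32_t iHighestPending;
    };

// Sketch of TIdleSupport::IntPending(): return the ID of the highest-priority
// pending interrupt from the GIC CPU interface located via aBaseIntIfAddr.
uint32_t IntPending(GicCpuIfc* const* aBaseIntIfAddr)
    {
    return (*aBaseIntIfAddr)->iHighestPending;
    }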