// Copyright (c) 2008-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32\nkernsmp\arm\nctimer.cia
// Fast Millisecond Timer Implementation
//
//

#include <e32cia.h>
#include <arm.h>

#ifdef __MSTIM_MACHINE_CODED__
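
// In _DEBUG builds the ASM_KILL_LINK macros below overwrite the two link words (iNext/iPrev)
// of a just-dequeued link with the pattern 0xdfdfdfdf, so that any later use of the stale
// pointers faults quickly. In release builds they expand to nothing.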


#ifdef _DEBUG
#define ASM_KILL_LINK(rp,rs)	asm("mov "#rs", #0xdf ");\
				asm("orr "#rs", "#rs", "#rs", lsl #8 ");\
				asm("orr "#rs", "#rs", "#rs", lsl #16 ");\
				asm("str "#rs", ["#rp"] ");\
				asm("str "#rs", ["#rp", #4] ");

#define ASM_KILL_LINK_OFFSET(rp,rs,offset)	asm("mov "#rs", #0xdf ");\
				asm("orr "#rs", "#rs", "#rs", lsl #8 ");\
				asm("orr "#rs", "#rs", "#rs", lsl #16 ");\
				asm("str "#rs", ["#rp", #"#offset"] ");\
				asm("str "#rs", ["#rp", #"#offset"+4] ");
#else
#define ASM_KILL_LINK(rp,rs)
#define ASM_KILL_LINK_OFFSET(rp,rs,offset)
#endif

#ifdef _DEBUG
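// __DEBUG_CALLBACK(n): if NTimerQ::iDebugFn has been set, call it with r0 = the word stored
// immediately after iDebugFn and r1 = the checkpoint number n (0-7 in DfcFn below), preserving
// r0-r3, r12 and lr around the call. In release builds the macro expands to nothing.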
#define __DEBUG_CALLBACK(n)	asm("stmfd sp!, {r0-r3,r12,lr} "); \
				asm("ldr r0, __TheTimerQ "); \
				asm("ldr r12, [r0, #%a0]!" : : "i" _FOFF(NTimerQ,iDebugFn)); \
				asm("cmp r12, #0 "); \
				asm("movne r1, #" #n ); \
				asm("ldrne r0, [r0, #4] "); \
				asm("movne lr, pc "); \
				__JUMP(ne,r12); \
				asm("ldmfd sp!, {r0-r3,r12,lr} ")
#else
#define __DEBUG_CALLBACK(n)
#endif


/** Start a nanokernel timer in zero-drift periodic mode with ISR or DFC callback.
Queues the timer to expire in the specified number of nanokernel ticks,
measured from the time at which it last expired. This allows exact periodic
timers to be implemented with no drift caused by delays in requeueing the
timer.
The expiry handler is called in the same context as the previous timer
expiry. Typically NTimer::OneShot() is used to start the first time interval;
that call determines whether the callback runs in ISR context, in the context
of the nanokernel timer thread (DfcThread1), or in another DFC thread. The
expiry handler then uses NTimer::Again() to requeue the timer.

@param aTime Timeout in nanokernel ticks
@return KErrNone if no error
@return KErrInUse if timer is already active
@return KErrArgument if the requested expiry time is in the past
@pre Any context
*/
__NAKED__ EXPORT_C TInt NTimer::Again(TInt /*aTime*/)
	{
	asm("mrs r12, cpsr ");
	INTS_OFF(r3, r12, INTS_ALL_OFF);	// all interrupts off
	asm("ldrb r3, [r0, #%a0]" : : "i" _FOFF(NTimer,iState));	// r3=iState
	asm("ldr r2, __TheTimerQ ");
	asm("cmp r3, #%a0" : : "i" ((TInt)EIdle));
	asm("ldreq r3, [r0, #%a0]" : : "i" _FOFF(NTimer,iTriggerTime));	// r3=iTriggerTime
	asm("bne add_mscb_in_use ");	// if already queued return KErrInUse
	asm("add r3, r3, r1 ");	// add requested time interval
	asm("ldr r1, [r2, #%a0]" : : "i" _FOFF(NTimerQ,iMsCount));	// r1=iMsCount
	asm("subs r1, r3, r1 ");	// r1=trigger time-next tick time
	asm("strpl r3, [r0, #%a0]" : : "i" _FOFF(NTimer,iTriggerTime));	// iTriggerTime+=aTime
	asm("bpl AddMsCallBack ");	// if time interval positive, ok
	asm("mov r0, #%a0" : : "i" ((TInt)KErrArgument));	// else return KErrArgument
	asm("b add_mscb_0 ");
	}
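
/*
A minimal usage sketch of the zero-drift periodic pattern described above (illustration only,
not built as part of this file; it assumes the standard NTimer constructor taking an NTimerFn
callback and a TAny* argument):

	const TInt KPeriodTicks = 10;					// example period, in nanokernel ticks

	void PeriodicExpired(TAny* aPtr)				// runs in DfcThread1 because aDfc=ETrue below
		{
		NTimer* t = (NTimer*)aPtr;
		// ... periodic work ...
		t->Again(KPeriodTicks);						// requeue relative to the last expiry - no drift
		}

	NTimer PeriodicTimer(&PeriodicExpired, &PeriodicTimer);

	void StartPeriodic()
		{
		PeriodicTimer.OneShot(KPeriodTicks, ETrue);	// first interval; ETrue = DFC-context callback
		}
*/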


/** Start a nanokernel timer in one-shot mode with ISR callback.
Queues the timer to expire in the specified number of nanokernel ticks. The
actual wait time will be at least that much and may be up to one tick more.
The expiry handler will be called in ISR context.

@param aTime Timeout in nanokernel ticks
@return KErrNone if no error
@return KErrInUse if timer is already active
@pre Any context
*/
__NAKED__ EXPORT_C TInt NTimer::OneShot(TInt /*aTime*/)
	{
	asm("mov r2, #0 ");
	// fall through
	}


/** Start a nanokernel timer in one-shot mode with ISR or DFC callback.
Queues the timer to expire in the specified number of nanokernel ticks. The
actual wait time will be at least that much and may be up to one tick more.
The expiry handler will be called in either ISR context or in the context
of the nanokernel timer thread (DfcThread1).

@param aTime Timeout in nanokernel ticks
@param aDfc TRUE if DFC callback required, FALSE if ISR callback required.
@return KErrNone if no error
@return KErrInUse if timer is already active
@pre Any context
*/
__NAKED__ EXPORT_C TInt NTimer::OneShot(TInt /*aTime*/, TBool /*aDfc*/)
	{
	asm("mrs r12, cpsr ");
	INTS_OFF(r3, r12, INTS_ALL_OFF);	// all interrupts off
	asm("ldrb r3, [r0, #%a0]" : : "i" _FOFF(NTimer,iState));	// r3=iState
	asm("cmp r3, #%a0" : : "i" ((TInt)EIdle));
	asm("bne add_mscb_in_use ");	// if already queued return KErrInUse
	asm("strb r2, [r0, #%a0]" : : "i" _FOFF(NTimer,iCompleteInDfc));	// iCompleteInDfc=aDfc
	asm("ldr r2, __TheTimerQ ");
	asm("ldr r3, [r2, #%a0]" : : "i" _FOFF(NTimerQ,iMsCount));	// r3=iMsCount
	asm("add r3, r3, r1 ");
	asm("str r3, [r0, #%a0]" : : "i" _FOFF(NTimer,iTriggerTime));	// iTriggerTime=ms count + aTime

	// r0->CallBack, r2=TheTimerQ, r1=time interval, r3=trigger time
	asm("AddMsCallBack: ");
	asm("cmp r1, #32 ");	// compare interval with 32ms
	asm("bge add_mscb_holding ");	// if >=32ms put it on holding queue
	asm("ldrb r1, [r0, #%a0]" : : "i" _FOFF(NTimer,iCompleteInDfc));	// r1=iCompleteInDfc
	asm("and r3, r3, #0x1f ");	// r3=trigger time & 0x1f
	asm("cmp r1, #0 ");
	asm("add r1, r2, r3, lsl #4 ");	// r1->IntQ corresponding to trigger time
	asm("addne r1, r1, #8 ");	// if (iCompleteInDfc), r1 points to DfcQ
	asm("ldr r3, [r1, #4] ");	// r3=pQ->iA.iPrev
	asm("str r0, [r1, #4] ");	// pQ->iA.iPrev=pC
	asm("str r0, [r3, #0] ");	// pQ->iA.iPrev->iNext=pC
	asm("stmia r0, {r1,r3} ");	// pC->iNext=&pQ->iA, pC->iPrev=pQ->iA.iPrev
	asm("mov r1, #%a0" : : "i" ((TInt)EFinal));
	asm("strb r1, [r0, #%a0]" : : "i" _FOFF(NTimer,iState));	// iState=EFinal
	asm("ldr r0, [r0, #%a0]" : : "i" _FOFF(NTimer,iTriggerTime));	// r0=iTriggerTime
	asm("ldr r3, [r2, #%a0]" : : "i" _FOFF(NTimerQ,iPresent));	// r3=TheTimerQ->iPresent
	asm("and r0, r0, #0x1f ");
	asm("mov r1, #1 ");
	asm("orr r3, r3, r1, lsl r0 ");	// iPresent |= (1<<index)
	asm("str r3, [r2, #%a0]" : : "i" _FOFF(NTimerQ,iPresent));
	asm("mov r0, #0 ");	// return KErrNone
	asm("msr cpsr, r12 ");
	__JUMP(,lr);

	asm("add_mscb_holding: ");
	asm("ldr r3, [r2, #%a0]!" : : "i" _FOFF(NTimerQ,iHoldingQ.iA.iPrev));	// r3=pQ->iPrev, r2=&iHoldingQ.iA.iPrev
	asm("mov r1, #%a0" : : "i" ((TInt)EHolding));
	asm("strb r1, [r0, #%a0]" : : "i" _FOFF(NTimer,iState));	// iState=EHolding
	asm("str r0, [r2], #-4 ");	// pQ->iPrev=pC, r2=&iHoldingQ
	asm("str r0, [r3, #0] ");	// pQ->iPrev->iNext=pC
	asm("stmia r0, {r2,r3} ");	// pC->iNext=pQ, pC->iPrev=pQ->iPrev
	asm("mov r0, #0 ");	// return KErrNone

	asm("add_mscb_0: ");
	asm("msr cpsr, r12 ");
	__JUMP(,lr);

	asm("add_mscb_in_use: ");
	asm("mov r0, #%a0" : : "i" ((TInt)KErrInUse));	// return KErrInUse
	asm("msr cpsr, r12 ");
	__JUMP(,lr);

	asm("__TheTimerQ: ");
	asm(".word TheTimerQ ");
	}


/** Starts a nanokernel timer in one-shot mode, with the callback occurring in the
thread of the supplied DFC.

Queues the timer to expire in the specified number of nanokernel ticks. The
actual wait time will be at least that much and may be up to one tick more.
On expiry, aDfc will be queued from ISR context.

Note that NKern::TimerTicks() can be used to convert milliseconds to ticks.

@param aTime Timeout in nanokernel ticks
@param aDfc DFC to be queued when the timer expires.

@return KErrNone if no error; KErrInUse if timer is already active.

@pre Any context

@see NKern::TimerTicks()
*/

__NAKED__ EXPORT_C TInt NTimer::OneShot(TInt /*aTime*/, TDfc& /*aDfc*/)
	{
	asm("mrs r12, cpsr ");
	INTS_OFF(r3, r12, INTS_ALL_OFF);	// all interrupts off
	asm("ldrb r3, [r0, #%a0]" : : "i" _FOFF(NTimer,iState));	// r3=iState
	asm("cmp r3, #%a0" : : "i" ((TInt)EIdle));
	asm("bne add_mscb_in_use ");	// if already queued return KErrInUse
	asm("mov r3, #0 ");
	asm("strb r3, [r0, #%a0]" : : "i" _FOFF(NTimer,iCompleteInDfc));	// iCompleteInDfc=0
	asm("str r3, [r0, #%a0]" : : "i" _FOFF(NTimer,iFunction));	// iFunction=NULL
	asm("str r2, [r0, #%a0]" : : "i" _FOFF(NTimer,iPtr));	// iPtr=&aDfc
	asm("ldr r2, __TheTimerQ ");
	asm("ldr r3, [r2, #%a0]" : : "i" _FOFF(NTimerQ,iMsCount));	// r3=iMsCount
	asm("add r3, r3, r1 ");
	asm("str r3, [r0, #%a0]" : : "i" _FOFF(NTimer,iTriggerTime));	// iTriggerTime=ms count + aTime
	asm("b AddMsCallBack ");
	}
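
/*
A minimal usage sketch for the TDfc overload above (illustration only; the TDfc construction,
MyDfcQ and KMyDfcPriority are assumptions for a hypothetical driver, not taken from this file):

	void TimeoutDfcFn(TAny* aPtr);					// runs in the thread that MyDfcQ belongs to

	TDfc TimeoutDfc(&TimeoutDfcFn, NULL, MyDfcQ, KMyDfcPriority);
	NTimer TimeoutTimer;

	TInt StartTimeout(TInt aMilliseconds)
		{
		return TimeoutTimer.OneShot(NKern::TimerTicks(aMilliseconds), TimeoutDfc);
		}
*/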


/** Cancel a nanokernel timer.
Removes this timer from the nanokernel timer queue. Does nothing if the
timer is inactive or has already expired.
Note that if the timer was queued with a DFC callback requested, it is possible
for the expiry handler to run even after Cancel() has been called. This happens
if DfcThread1 is preempted just before calling the expiry handler for this
timer and the preempting thread/ISR/IDFC calls Cancel() on the timer.

@pre Any context
*/
EXPORT_C __NAKED__ void NTimer::Cancel()
	{
	asm("mrs r12, cpsr ");
	INTS_OFF(r3, r12, INTS_ALL_OFF);	// all interrupts off
	asm("ldrb r3, [r0, #%a0]" : : "i" _FOFF(NTimer,iState));
	asm("mov r1, #0 ");
	asm("cmp r3, #%a0" : : "i" ((TInt)ETransferring));
	asm("bcc cancel_idle ");	// if EIdle, nothing to do
	asm("strb r1, [r0, #%a0]" : : "i" _FOFF(NTimer,iState));	// iState=EIdle
	asm("beq cancel_xfer ");	// if ETransferring, branch
	asm("ldmia r0, {r1,r2} ");	// If queued, dequeue - r1=next, r2=prev
	asm("cmp r3, #%a0" : : "i" ((TInt)ECritical));
	asm("str r1, [r2, #0] ");	// if queued, prev->next=next
	asm("str r2, [r1, #4] ");	// and next->prev=prev
	ASM_KILL_LINK(r0,r1);
	asm("ldrcs r1, __TheTimerQ ");
	asm("ldrhi r0, [r0, #%a0]" : : "i" _FOFF(NTimer,iTriggerTime));	// r0=iTriggerTime
	asm("ldrhi r3, [r1, #%a0]" : : "i" _FOFF(NTimerQ,iMsCount));	// r3=iMsCount
	asm("bcc cancel_idle ");	// if EHolding or EOrdered, finished
	asm("beq cancel_critical ");	// if ECritical, branch
	// r1->ms timer, state was EFinal
	asm("subs r3, r0, r3 ");	// r3=trigger time - next tick
	asm("bmi cancel_idle ");	// if trigger time already expired, don't touch iPresent (was on iCompletedQ)
	asm("and r0, r0, #0x1f ");	// r0=iTriggerTime&0x1f = queue index
	asm("mov r3, r1 ");
	asm("ldr r2, [r3, r0, lsl #4]! ");	// r3->iIntQ for this timer, r2=iIntQ head pointer
	asm("cmp r2, r3 ");
	asm("bne cancel_idle ");	// iIntQ not empty so finished
	asm("ldr r2, [r3, #8]! ");	// r2=iDfcQ head pointer
	asm("cmp r2, r3 ");
	asm("bne cancel_idle ");	// iDfcQ not empty so finished
	asm("ldr r2, [r1, #%a0]" : : "i" _FOFF(NTimerQ,iPresent));	// r2=TheTimerQ->iPresent
	asm("mov r3, #1 ");
	asm("bic r2, r2, r3, lsl r0 ");	// iPresent &= ~(1<<index)
	asm("str r2, [r1, #%a0]" : : "i" _FOFF(NTimerQ,iPresent));

	asm("cancel_idle: ");
	asm("msr cpsr, r12 ");
	__JUMP(,lr);

	asm("cancel_xfer: ");
	asm("ldr r1, __TheTimerQ ");	// r1->ms timer, state was ETransferring
	asm("strb r3, [r1, #%a0]" : : "i" _FOFF(NTimerQ,iTransferringCancelled));
	asm("msr cpsr, r12 ");
	__JUMP(,lr);

	asm("cancel_critical: ");	// r1->ms timer, state was ECritical
	asm("strb r3, [r1, #%a0]" : : "i" _FOFF(NTimerQ,iCriticalCancelled));
	asm("msr cpsr, r12 ");
	__JUMP(,lr);
	}


/** Return the number of ticks before the next nanokernel timer expiry.
May on occasion return a pessimistic estimate (i.e. too low).
Used by the base port to disable the system tick interrupt when the system
is idle.

@return The number of ticks before the next nanokernel timer expiry.

@pre Interrupts must be disabled.

@post Interrupts are disabled.
*/
EXPORT_C __NAKED__ TInt NTimerQ::IdleTime()
	{
	ASM_CHECK_PRECONDITIONS(MASK_INTERRUPTS_DISABLED);

	asm("ldr r12, __TheTimerQ ");
	asm("mvn r0, #0x80000000 ");	// set r0=KMaxTInt initially
	asm("ldr r2, [r12, #%a0]!" : : "i" _FOFF(NTimerQ,iOrderedQ));	// r12->iOrderedQ, r2=iOrderedQ first
	asm("ldr r3, [r12, #-12] ");	// r3=next tick number
	asm("cmp r2, r12 ");	// check if iOrderedQ empty
	asm("ldrne r0, [r2, #%a0]" : : "i" _FOFF(NTimer,iTriggerTime));	// if not, r0=ordered Q first->trigger time
	asm("ldr r1, [r12, #-8]! ");	// r1=iHoldingQ first, r12->iHoldingQ
	asm("bicne r0, r0, #0x0f ");
	asm("subne r0, r0, #16 ");	// r0=tick at which transfer to final queue would occur
	asm("subne r0, r0, r3 ");	// return value = trigger time - iMsCount
	asm("cmp r1, r12 ");	// holding Q empty?
	asm("ldr r1, [r12, #-8] ");	// r1=iPresent
	asm("and r12, r3, #0x1f ");	// r12=next tick mod 32
	asm("beq 1f ");	// branch if holding Q empty
	asm("ands r2, r3, #0x0f ");	// else r2=next tick no. mod 16
	asm("rsbne r2, r2, #16 ");	// if nonzero, subtract from 16 to give #ticks before next multiple of 16
	asm("cmp r2, r0 ");
	asm("movlt r0, r2 ");	// update result if necessary
	asm("1: ");
	asm("movs r1, r1, ror r12 ");	// r1=iPresent rotated so that LSB corresponds to next tick
	__JUMP(eq,lr);	// if iPresent=0, finished
	asm("mov r3, #0 ");	// r3 will accumulate bit number of least significant 1
	asm("movs r2, r1, lsl #16 ");
	asm("movne r1, r2 ");
	asm("addeq r3, r3, #16 ");
	asm("movs r2, r1, lsl #8 ");
	asm("movne r1, r2 ");
	asm("addeq r3, r3, #8 ");
	asm("movs r2, r1, lsl #4 ");
	asm("movne r1, r2 ");
	asm("addeq r3, r3, #4 ");
	asm("movs r2, r1, lsl #2 ");
	asm("movne r1, r2 ");
	asm("addeq r3, r3, #2 ");
	asm("movs r2, r1, lsl #1 ");
	asm("addeq r3, r3, #1 ");
	asm("cmp r3, r0 ");
	asm("movlt r0, r3 ");	// update result if necessary
	__JUMP(,lr);
	}
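
/*
A rough C-style sketch of the computation above (illustration only; it assumes the usual
SDblQue::IsEmpty()/First() helpers and public access to the NTimerQ fields):

	TInt IdleTimeSketch(NTimerQ& aQ)
		{
		TInt r = KMaxTInt;
		if (!aQ.iOrderedQ.IsEmpty())
			{
			NTimer* pC = (NTimer*)aQ.iOrderedQ.First();
			// tick at which the first ordered entry would be transferred to a final queue
			r = (TInt)((pC->iTriggerTime & ~0x0fu) - 16) - (TInt)aQ.iMsCount;
			}
		if (!aQ.iHoldingQ.IsEmpty())
			{
			TInt h = aQ.iMsCount & 0x0f;			// ticks past the last multiple of 16
			if (h)
				h = 16 - h;							// ticks until the holding queue is next examined
			if (h < r)
				r = h;
			}
		TUint32 rot = aQ.iMsCount & 0x1f;
		TUint32 p = aQ.iPresent;
		if (rot)
			p = (p >> rot) | (p << (32 - rot));		// rotate right so that bit 0 = the next tick
		if (p)
			{
			TInt b = 0;								// binary search for the least significant set bit
			if (!(p & 0xffffu)) { b += 16; p >>= 16; }
			if (!(p & 0xffu))   { b += 8;  p >>= 8;  }
			if (!(p & 0x0fu))   { b += 4;  p >>= 4;  }
			if (!(p & 0x03u))   { b += 2;  p >>= 2;  }
			if (!(p & 0x01u))   { b += 1; }
			if (b < r)
				r = b;
			}
		return r;
		}
*/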


/** Tick over the nanokernel timer queue.
This function should be called by the base port in the system tick timer ISR.
It should not be called at any other time.
The value of 'this' to pass is the value returned by NTimerQ::TimerAddress().

@see NTimerQ::TimerAddress()
*/
__NAKED__ EXPORT_C void NTimerQ::Tick()
	{
	// Enter with r0 pointing to NTimerQ
	asm("ldr r1, __TheScheduler ");
	asm("mrs r12, cpsr ");

	// do the timeslice tick - on ARM __SCHEDULER_MACHINE_CODED is mandatory
	asm("ldr r2, [r1, #%a0]" : : "i" _FOFF(TScheduler,iCurrentThread));
	asm("ldr r3, [r2, #%a0]" : : "i" _FOFF(NThread,iTime));
	asm("subs r3, r3, #1 ");
	asm("strge r3, [r2, #%a0]" : : "i" _FOFF(NThread,iTime));
	asm("streqb r12, [r1, #%a0]" : : "i" _FOFF(TScheduler,iRescheduleNeededFlag));	// r12 lower byte is never 0
	INTS_OFF(r3, r12, INTS_ALL_OFF);	// disable all interrupts

	asm("ldr r1, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iMsCount));	// r1=iMsCount
	asm("ldr r3, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iPresent));	// r3=iPresent
	asm("and r2, r1, #0x1f ");	// r2=iMsCount & 0x1f
	asm("add r1, r1, #1 ");
	asm("str r1, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iMsCount));	// iMsCount++
	asm("mov r1, #1 ");
	asm("tst r3, r1, lsl r2 ");	// test iPresent bit for this tick
	asm("bic r1, r3, r1, lsl r2 ");	// clear iPresent bit
	asm("strne r1, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iPresent));	// update iPresent if necessary
	asm("bne mstim_tick_1 ");	// if bit was set, we have work to do
	asm("tst r2, #0x0f ");	// else test for tick 0 or 16
	asm("msrne cpsr_c, r12 ");	// if neither return
	__JUMP(ne,lr);

	asm("mstim_tick_1: ");	// get here if timers complete this tick
	asm("stmfd sp!, {r4-r6,lr} ");
	asm("add r1, r0, r2, lsl #4 ");	// r1->IntQ for this tick
	asm("ldr r3, [r1, #8]! ");	// r1->DfcQ and r3=DfcQ first
	asm("mov r5, #0 ");	// r5=doDfc=FALSE
	asm("cmp r3, r1 ");
	asm("beq mstim_tick_2 ");	// skip if DfcQ empty

	// Move DFC completions from iDfcQ to iCompletedQ
	asm("ldr lr, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iCompletedQ.iA.iPrev));	// lr=last completed
	asm("ldr r4, [r1, #4] ");	// r4=DfcQ last
	asm("add r5, r0, #%a0" : : "i" _FOFF(NTimerQ,iDfc));	// doDfc=TRUE
	asm("str r3, [lr, #0] ");	// old last pending->next = DfcQ first
	asm("str r4, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iCompletedQ.iA.iPrev));	// last pending=DfcQ last
	asm("str lr, [r3, #4] ");	// DfcQ first->prev = old last pending
	asm("add lr, r0, #%a0" : : "i" _FOFF(NTimerQ,iCompletedQ));	// lr=&iCompletedQ.iA
	asm("str lr, [r4, #0] ");	// DfcQ last->next=&iCompletedQ.iA
	asm("str r1, [r1, #0] ");	// DfcQ first=&DfcQ
	asm("str r1, [r1, #4] ");	// DfcQ last=&DfcQ

	asm("mstim_tick_2: ");
	asm("tst r2, #0x0f ");	// check for tick 0 or 16
	asm("bne mstim_tick_3 ");	// skip if not

	// Tick 0 or 16 - must check holding queue and ordered queue
	asm("ldr r3, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iHoldingQ));	// r3=iHoldingQ first
	asm("add r6, r0, #%a0" : : "i" _FOFF(NTimerQ,iHoldingQ));	// r6=&iHoldingQ
	asm("cmp r3, r6 ");
	asm("addne r5, r0, #%a0" : : "i" _FOFF(NTimerQ,iDfc));	// if iHoldingQ nonempty, doDfc=TRUE and skip ordered queue check
	asm("bne mstim_tick_3 ");	// skip if iHoldingQ nonempty
	asm("ldr r3, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iOrderedQ));	// r3=iOrderedQ first
	asm("add r6, r0, #%a0" : : "i" _FOFF(NTimerQ,iOrderedQ));	// r6=&iOrderedQ
	asm("cmp r3, r6 ");
	asm("beq mstim_tick_3 ");	// skip if iOrderedQ empty
	asm("ldr r4, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iMsCount));	// else r4=iMsCount
	asm("ldr r3, [r3, #%a0]" : : "i" _FOFF(NTimer,iTriggerTime));	// and r3=trigger time of first on ordered queue
	asm("sub r3, r3, r4 ");	// r3=trigger time-iMsCount
	asm("cmp r3, #31 ");
	asm("addls r5, r0, #%a0" : : "i" _FOFF(NTimerQ,iDfc));	// if first expiry in <=31ms, doDfc=TRUE

	// Check iIntQ
	asm("mstim_tick_3: ");
	asm("ldr r3, [r1, #-8]! ");	// r1->iIntQ, r3=iIntQ first
	asm("mov r6, r12 ");	// r6=original cpsr
	asm("cmp r3, r1 ");	// test if iIntQ empty
	asm("beq mstim_tick_4 ");	// branch if it is

	// Transfer iIntQ to a temporary queue
	asm("ldr r4, [r1, #4] ");	// r4=iIntQ last
	asm("str r1, [r1, #0] ");	// clear iIntQ
	asm("str r1, [r1, #4] ");
	asm("stmfd sp!, {r3,r4} ");	// copy queue onto stack
	asm("str sp, [r4, #0] ");	// iIntQ last->next=sp
	asm("str sp, [r3, #4] ");	// iIntQ first->prev=sp
	INTS_OFF_1(r4, r6, INTS_ALL_OFF);	// r4=cpsr with all interrupts off

	// Walk the temporary queue and complete timers
	asm("mstim_tick_5: ");
	INTS_OFF_2(r4, r6, INTS_ALL_OFF);	// all interrupts off
	asm("ldr r0, [sp, #0] ");	// r0=q.iNext
	asm("mov r3, #%a0" : : "i" ((TInt)NTimer::EIdle));
	asm("cmp r0, sp ");	// end of queue?
	asm("beq mstim_tick_6 ");	// if so, branch out
	asm("ldmia r0!, {r1,r2} ");	// r1=next r2=prev, r0->iPtr
	asm("strb r3, [r0, #%a0]" : : "i" (_FOFF(NTimer,iState)-8));	// iState=EIdle
	ASM_KILL_LINK_OFFSET(r0,r12,-8);
	asm("ldmia r0, {r0,r12} ");	// r0=iPtr, r12=iFunction
	asm("str r1, [r2, #0] ");	// prev->next=next
	asm("str r2, [r1, #4] ");	// next->prev=prev
	asm("adr lr, mstim_tick_5 ");	// return to mstim_tick_5
	asm("msr cpsr, r6 ");	// restore interrupts
	asm("cmp r12, #0 ");	// iFunction==NULL ?
	asm("beq mstim_tick_7 ");	// if so queue Dfc (iPtr is a pointer to TDfc)
	__JUMP(,r12);	// call timer callback with r0=iPtr
	asm("b mstim_tick_6 ");	// skip queuing of Dfc

	asm("mstim_tick_7: ");
	asm("b " CSM_ZN4TDfc3AddEv);	// add the DFC with r0=iPtr - a pointer to TDfc

	asm("mstim_tick_6: ");
	asm("add sp, sp, #8 ");	// take temporary queue off stack

	asm("mstim_tick_4: ");
	asm("msr cpsr, r6 ");	// restore original interrupt state
	asm("movs r0, r5 ");	// DFC needed? if so, r0->iDfc
	asm("ldmfd sp!, {r4-r6,lr} ");	// restore registers
	asm("bne " CSM_ZN4TDfc3AddEv);	// add the DFC if required
	__JUMP(,lr);	// if no DFC needed, return

	asm("__TheScheduler: ");
	asm(".word TheScheduler ");
	}
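
/*
A minimal sketch of how a base port's system tick ISR might drive this (illustration only;
the interrupt id and the hardware acknowledgement are hypothetical, and the Interrupt::Bind()
usage is the conventional base-port pattern rather than anything defined in this file):

	void MsTimerTickIsr(TAny* aPtr)
		{
		// acknowledge/clear the hardware timer interrupt here (hardware specific, omitted)
		((NTimerQ*)aPtr)->Tick();					// aPtr was bound to NTimerQ::TimerAddress()
		}

	void BindMsTimerInterrupt()
		{
		TAny* timerQ = NTimerQ::TimerAddress();
		Interrupt::Bind(KIntIdMsTimer, MsTimerTickIsr, timerQ);		// KIntIdMsTimer: hypothetical id
		Interrupt::Enable(KIntIdMsTimer);
		}
*/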

__NAKED__ void NTimerQ::DfcFn(TAny* /*aPtr*/)
	{
	// Enter with r0 pointing to NTimerQ
	asm("stmfd sp!, {r7-r11,lr} ");
	SET_INTS_1(r11, MODE_SVC, INTS_ALL_ON);	// always called from SVC mode
	SET_INTS_1(r10, MODE_SVC, INTS_ALL_OFF);	// with interrupts enabled

	// First transfer entries on the Ordered queue to the Final queues
	asm("mstim_dfc_0: ");
	SET_INTS_2(r10, MODE_SVC, INTS_ALL_OFF);	// disable interrupts
	asm("ldr r1, [r0, #%a0]!" : : "i" _FOFF(NTimerQ,iOrderedQ));	// r0->iOrderedQ, r1=orderedQ first
	asm("cmp r1, r0 ");
	asm("beq mstim_dfc_1 ");	// ordered Q empty so move to next stage
	asm("ldr r2, [r1, #%a0]" : : "i" _FOFF(NTimer,iTriggerTime));	// r2=r1->trigger time
	asm("ldr r3, [r0, #-12] ");	// r3=iMsCount
	asm("subs r3, r2, r3 ");	// r3=trigger time-iMsCount
	asm("cmp r3, #31 ");	// test if remaining time <32ms or has already passed
	asm("bgt mstim_dfc_1 ");	// if >31ms, move to next stage (signed comparison to catch already passed case)
	asm("sub r0, r0, #%a0" : : "i" _FOFF(NTimerQ,iOrderedQ));	// r0->NTimerQ
	asm("bl dequeaddfinal ");	// <=31ms, so deque and add to final queue
	SET_INTS_2(r11, MODE_SVC, INTS_ALL_ON);	// let interrupts in
	__DEBUG_CALLBACK(0);
	asm("b mstim_dfc_0 ");

	asm("mstim_dfc_1: ");
	SET_INTS_2(r11, MODE_SVC, INTS_ALL_ON);	// let interrupts in
	asm("sub r0, r0, #%a0" : : "i" _FOFF(NTimerQ,iOrderedQ));	// r0->NTimerQ
	__DEBUG_CALLBACK(1);

	// Next transfer entries on the Holding queue to the Ordered queue or final queue
	asm("mstim_dfc_2: ");
	SET_INTS_2(r10, MODE_SVC, INTS_ALL_OFF);	// disable interrupts
	asm("ldr r1, [r0, #%a0]!" : : "i" _FOFF(NTimerQ,iHoldingQ));	// r0->iHoldingQ, r1=holdingQ first
	asm("cmp r1, r0 ");
	asm("beq mstim_dfc_3 ");	// holding Q empty so move to next stage
	asm("ldr r2, [r1, #%a0]" : : "i" _FOFF(NTimer,iTriggerTime));	// r2=r1->trigger time
	asm("ldr r3, [r0, #-4] ");	// r3=iMsCount
	asm("sub r0, r0, #%a0" : : "i" _FOFF(NTimerQ,iHoldingQ));	// r0->NTimerQ
	asm("subs r3, r2, r3 ");	// r3=trigger time-iMsCount
	asm("cmp r3, #31 ");	// test if remaining time <32ms or has already passed
	asm("bgt mstim_dfc_4 ");	// if >31ms, need to put it on the ordered Q (signed comparison to catch late case)
	asm("bl dequeaddfinal ");	// <=31ms or already passed, so deque and add to final queue

	asm("mstim_dfc_7: ");
	SET_INTS_2(r11, MODE_SVC, INTS_ALL_ON);	// let interrupts in
	__DEBUG_CALLBACK(2);
	asm("b mstim_dfc_2 ");	// process next holding Q entry

	// need to put entry r1 trigger time r2 on the ordered Q
	asm("mstim_dfc_4: ");
	asm("ldmia r1, {r3,r12} ");	// r3=r1->next, r12=r1->prev
	asm("mov r9, #0 ");
	asm("strb r9, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iTransferringCancelled));	// iTransferringCancelled=0
	asm("str r3, [r12, #0] ");	// prev->next=next
	asm("str r12, [r3, #4] ");	// next->prev=prev
	asm("mov r3, #%a0" : : "i" ((TInt)NTimer::ETransferring));
	asm("strb r3, [r1, #%a0]" : : "i" _FOFF(NTimer,iState));	// r1->iState=ETransferring

	asm("mstim_dfc_5: ");
	SET_INTS_2(r11, MODE_SVC, INTS_ALL_ON);	// let interrupts in
	asm("add lr, r0, #%a0" : : "i" _FOFF(NTimerQ,iOrderedQ));	// lr=&iOrderedQ.iA
	__DEBUG_CALLBACK(3);
	SET_INTS_2(r10, MODE_SVC, INTS_ALL_OFF);	// disable interrupts

	asm("mstim_dfc_9: ");
	asm("ldrb r12, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iTransferringCancelled));
	asm("ldr r3, [lr, #0] ");	// r3=iOrderedQ first
	asm("cmp r12, #0 ");
	asm("bne mstim_dfc_7 ");	// Entry r1 has been cancelled so move to next one
	asm("strb r9, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iCriticalCancelled));	// iCriticalCancelled=0

	// Walk iOrderedQ to find correct position for this entry
	asm("mstim_dfc_6: ");
	asm("cmp r3, lr ");	// reached end of ordered Q?
	asm("ldrne r12, [r3, #%a0]" : : "i" _FOFF(NTimer,iTriggerTime));	// if not, r12=r3->trigger time
	asm("beq mstim_dfc_8 ");	// branch if we have
	asm("mov r8, #%a0" : : "i" ((TInt)NTimer::ECritical));
	asm("subs r12, r12, r2 ");	// r12=r3->trigger - r1->trigger
	asm("bpl mstim_dfc_8 ");	// branch if r3 expires after r1
	asm("strb r8, [r3, #%a0]" : : "i" _FOFF(NTimer,iState));	// r3->iState=ECritical
	SET_INTS_2(r11, MODE_SVC, INTS_ALL_ON);	// let interrupts in
	asm("mov r8, #%a0" : : "i" ((TInt)NTimer::EOrdered));
	__DEBUG_CALLBACK(4);
	SET_INTS_2(r10, MODE_SVC, INTS_ALL_OFF);	// disable interrupts
	asm("ldr r12, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iTransferringCancelled));
	asm("tst r12, #0xff00 ");	// test iCriticalCancelled
	asm("streqb r8, [r3, #%a0]" : : "i" _FOFF(NTimer,iState));	// if not set, r3->iState=EOrdered
	asm("cmp r12, #0 ");	// test iTransferringCancelled and iCriticalCancelled
	asm("ldreq r3, [r3, #0] ");	// if neither set r3=r3->iNext
	asm("beq mstim_dfc_6 ");	// and inspect next ordered Q entry
	asm("b mstim_dfc_9 ");	// if either set, start again from beginning of ordered Q

	asm("mstim_dfc_8: ");	// if we get to here we need to insert r1 before r3
	asm("ldr r12, [r3, #4] ");	// r12=r3->iPrev
	asm("mov r8, #%a0" : : "i" ((TInt)NTimer::EOrdered));
	asm("strb r8, [r1, #%a0]" : : "i" _FOFF(NTimer,iState));	// r1->iState=EOrdered
	asm("str r1, [r3, #4] ");	// r3->prev=r1
	asm("str r1, [r12, #0] ");	// r3->prev->next=r1
	asm("stmia r1, {r3,r12} ");	// r1->next=r3, r1->prev=r3->prev
	SET_INTS_2(r11, MODE_SVC, INTS_ALL_ON);	// let interrupts in
	__DEBUG_CALLBACK(5);
	asm("b mstim_dfc_2 ");	// process next holding Q entry

	// Get here when all holding Q entries processed
	asm("mstim_dfc_3: ");
	SET_INTS_2(r11, MODE_SVC, INTS_ALL_ON);	// let interrupts in
	__DEBUG_CALLBACK(6);
	asm("add r8, r0, #16 ");	// r8->iCompletedQ

	// Finally do call backs for timers which requested DFC callback
	asm("mstim_dfc_10: ");
	SET_INTS_2(r10, MODE_SVC, INTS_ALL_OFF);	// disable interrupts
	asm("ldr r9, [r8, #0] ");	// r9=completed Q first
	asm("mov r3, #%a0" : : "i" ((TInt)NTimer::EIdle));
	asm("cmp r9, r8 ");	// Is completed Q empty?
	asm("beq mstim_dfc_11 ");	// branch out if it is
	asm("ldmia r9!, {r1,r2} ");	// r1=r9->next, r2=r9->prev, r9->iPtr of completed entry
	asm("strb r3, [r9, #%a0]" : : "i" (_FOFF(NTimer,iState)-8));	// iState=EIdle for completed entry
	asm("ldmia r9, {r0,r3} ");	// r0=iPtr, r3=function address
	ASM_KILL_LINK_OFFSET(r9,r12,-8);
	asm("str r1, [r2, #0] ");	// prev->next=next
	asm("str r2, [r1, #4] ");	// next->prev=prev
	SET_INTS_2(r11, MODE_SVC, INTS_ALL_ON);	// let interrupts in
	__DEBUG_CALLBACK(7);
	asm("adr lr, mstim_dfc_10 ");	// return to mstim_dfc_10
	__JUMP(,r3);	// call back with r0=iPtr

	// All done
	asm("mstim_dfc_11: ");
	SET_INTS_2(r11, MODE_SVC, INTS_ALL_ON);	// let interrupts in
	asm("ldmfd sp!, {r7-r11,pc} ");	// and return

	// Subroutine dequeaddfinal
	// Deque the NTimer pointed to by r1 and put it on its final queue
	// Enter with r0->NTimerQ, r1->NTimer, r2=r1->iTriggerTime
	// Enter and leave with interrupts disabled
	// Can modify r1-r3,r8,r9,r12
	asm("dequeaddfinal: ");
	asm("ldmia r1, {r8,r9} ");	// r8=r1->next, r9=r1->prev
	asm("mov r3, #%a0" : : "i" ((TInt)NTimer::EFinal));
	asm("strb r3, [r1, #%a0]" : : "i" _FOFF(NTimer,iState));	// iState=EFinal
	asm("str r8, [r9, #0] ");	// prev->next=next
	asm("str r9, [r8, #4] ");	// next->prev=prev
	asm("ldr r12, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iPresent));	// r12=timer iPresent
	asm("and r2, r2, #0x1f ");	// r2=trigger time & 0x1f
	asm("mov r3, #1 ");
	asm("orr r12, r12, r3, lsl r2 ");	// set bit in iPresent
	asm("ldrb r3, [r1, #%a0]" : : "i" _FOFF(NTimer,iCompleteInDfc));	// r3=iCompleteInDfc
	asm("str r12, [r0, #%a0]" : : "i" _FOFF(NTimerQ,iPresent));
	asm("add r2, r0, r2, lsl #4 ");	// r2->iIntQ for this timer
	asm("cmp r3, #0 ");
	asm("addne r2, r2, #8 ");	// if iCompleteInDfc, r2->iDfcQ for this timer
	asm("ldr r12, [r2, #4] ");	// r12->last on queue
	asm("str r1, [r2, #4] ");	// queue->last=this
	asm("str r1, [r12, #0] ");	// last->next=this
	asm("stmia r1, {r2,r12} ");	// this->next=&queue, this->prev=last on queue
	__JUMP(,lr);
	}
#endif