// Copyright (c) 1998-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32\include\nkern\nkern.h
//
// WARNING: This file contains some APIs which are internal and are subject
//          to change without notice. Such APIs should therefore not be used
//          outside the Kernel and Hardware Services package.
//
#ifndef __NKERN_H__
#define __NKERN_H__

// A standalone nanokernel build is still a kernel-side build.
#ifdef __STANDALONE_NANOKERNEL__
#undef	__IN_KERNEL__
#define	__IN_KERNEL__
#endif

#include <e32const.h>
#include <nklib.h>
#include <dfcs.h>
#include <nk_trace.h>
#include <e32atomics.h>

extern "C" {
/** Halts the system, reporting the faulting file and line.
@internalComponent */
IMPORT_C void NKFault(const char* file, TInt line);

/** Called when a CPU goes idle.
@internalComponent */
void NKIdle(TUint32 aStage);
}

/** Faults the system at the current source location.
@publishedPartner
@released
*/
#define FAULT()	NKFault(__FILE__,__LINE__)

#ifdef _DEBUG

/** Debug-build-only assertion; faults the system if the condition is false.
@publishedPartner
@released
*/
#define __NK_ASSERT_DEBUG(c)	((void) ((c)||(FAULT(),0)) )

#else

// In release builds the debug assertion compiles to nothing.
#define __NK_ASSERT_DEBUG(c)

#endif

/** Assertion checked in all builds; faults the system if the condition is false.
@publishedPartner
@released
*/
#define __NK_ASSERT_ALWAYS(c)	((void) ((c)||(FAULT(),0)) )

/** Number of nanothread priority levels.
@publishedPartner
@released
*/
const TInt KNumPriorities=64;

// Maximum number of CPUs supported by the nanokernel.
const TInt KMaxCpus=8;

class NThread;
79 |
/** Spin lock |
|
80 |
||
81 |
Used for protecting a code fragment against both interrupts and concurrent |
|
82 |
execution on another processor. |
|
83 |
||
84 |
@internalComponent |
|
85 |
*/ |
|
86 |
class TSpinLock |
|
87 |
{ |
|
88 |
public: |
|
89 |
enum TOrder |
|
90 |
{ |
|
91 |
// Bit 7 of order clear for locks used with interrupts disabled |
|
92 |
EOrderGenericIrqLow0 =0x00u, // Device driver spin locks, low range |
|
93 |
EOrderGenericIrqLow1 =0x01u, // Device driver spin locks, low range |
|
94 |
EOrderGenericIrqLow2 =0x02u, // Device driver spin locks, low range |
|
95 |
EOrderGenericIrqLow3 =0x03u, // Device driver spin locks, low range |
|
96 |
EOrderGenericIrqHigh0 =0x18u, // Device driver spin locks, high range |
|
97 |
EOrderGenericIrqHigh1 =0x19u, // Device driver spin locks, high range |
|
98 |
EOrderGenericIrqHigh2 =0x1Au, // Device driver spin locks, high range |
|
99 |
EOrderGenericIrqHigh3 =0x1Bu, // Device driver spin locks, high range |
|
100 |
||
101 |
// Bit 7 of order set for locks used with interrupts enabled, preemption disabled |
|
102 |
EOrderGenericPreLow0 =0x80u, // Device driver spin locks, low range |
|
103 |
EOrderGenericPreLow1 =0x81u, // Device driver spin locks, low range |
|
104 |
EOrderGenericPreHigh0 =0x9Eu, // Device driver spin locks, high range |
|
105 |
EOrderGenericPreHigh1 =0x9Fu, // Device driver spin locks, high range |
|
106 |
||
107 |
EOrderNone =0xFFu // No order check required (e.g. for dynamic ordering) |
|
108 |
}; |
|
109 |
public: |
|
110 |
IMPORT_C TSpinLock(TUint aOrder); |
|
111 |
private: |
|
112 |
volatile TUint64 iLock; |
|
113 |
}; |
|
114 |
||
115 |
/** Macro to disable interrupts and acquire the lock. |
|
116 |
||
117 |
@publishedPartner |
|
118 |
@prototype |
|
119 |
*/ |
|
120 |
#define __SPIN_LOCK_IRQ(lock) ((void)NKern::DisableAllInterrupts()) |
|
121 |
||
122 |
/** Macro to release the lock and enable interrupts. |
|
123 |
||
124 |
@publishedPartner |
|
125 |
@prototype |
|
126 |
*/ |
|
127 |
#define __SPIN_UNLOCK_IRQ(lock) (NKern::EnableAllInterrupts()) |
|
128 |
||
129 |
/** Macro to see if someone else is waiting for the lock, enabling IRQs |
|
130 |
then disabling IRQs again. |
|
131 |
||
132 |
@publishedPartner |
|
133 |
@prototype |
|
134 |
*/ |
|
135 |
#define __SPIN_FLASH_IRQ(lock) (NKern::EnableAllInterrupts(),(void)NKern::DisableAllInterrupts(),((TBool)TRUE)) |
|
136 |
||
137 |
/** Macro to remember original interrupt state then disable interrupts |
|
138 |
and acquire the lock. |
|
139 |
||
140 |
@publishedPartner |
|
141 |
@prototype |
|
142 |
*/ |
|
143 |
#define __SPIN_LOCK_IRQSAVE(lock) (NKern::DisableAllInterrupts()) |
|
144 |
||
145 |
/** Macro to release the lock then restore original interrupt state to that |
|
146 |
supplied. |
|
147 |
||
148 |
@publishedPartner |
|
149 |
@prototype |
|
150 |
*/ |
|
151 |
#define __SPIN_UNLOCK_IRQRESTORE(lock,irq) (NKern::RestoreInterrupts(irq)) |
|
152 |
||
153 |
/** Macro to see if someone else is waiting for the lock, enabling IRQs to |
|
154 |
the original state supplied then disabling IRQs again. |
|
155 |
||
156 |
@publishedPartner |
|
157 |
@prototype |
|
158 |
*/ |
|
159 |
#define __SPIN_FLASH_IRQRESTORE(lock,irq) (NKern::RestoreInterrupts(irq),((void)NKern::DisableAllInterrupts()),((TBool)TRUE)) |
|
160 |
||
161 |
/** Macro to acquire the lock. This assumes the caller has already disabled |
|
162 |
interrupts/preemption. |
|
163 |
||
164 |
If interrupts/preemption is not disabled a run-time assert will occur |
|
165 |
This is to protect against unsafe code that might lead to same core |
|
166 |
deadlock. |
|
167 |
||
168 |
In device driver code it is safer to use __SPIN_LOCK_IRQSAVE() instead, |
|
169 |
although not as efficient should interrupts aleady be disabled for the |
|
170 |
duration the lock is held. |
|
171 |
||
172 |
@publishedPartner |
|
173 |
@prototype |
|
174 |
*/ |
|
175 |
#define __SPIN_LOCK(lock) |
|
176 |
||
177 |
/** Macro to release the lock, don't change interrupt/preemption state. |
|
178 |
||
179 |
@publishedPartner |
|
180 |
@prototype |
|
181 |
*/ |
|
182 |
#define __SPIN_UNLOCK(lock) |
|
183 |
||
184 |
/** |
|
185 |
@internalComponent |
|
186 |
*/ |
|
187 |
#define __SPIN_FLASH(lock) ((TBool)FALSE) |
|
188 |
||
189 |
/** Macro to see if someone else is waiting for the lock, enabling preemption |
|
190 |
then disabling it again. |
|
191 |
||
192 |
@publishedPartner |
|
193 |
@prototype |
|
194 |
*/ |
|
195 |
#define __SPIN_FLASH_PREEMPT(lock) ((TBool)NKern::PreemptionPoint()) |
|
196 |
||
197 |
||
198 |
/** Read/Write Spin lock |
|
199 |
||
200 |
@internalComponent |
|
201 |
*/ |
|
202 |
class TRWSpinLock |
|
203 |
{ |
|
204 |
public: |
|
205 |
IMPORT_C TRWSpinLock(TUint aOrder); // Uses same order space as TSpinLock |
|
206 |
private: |
|
207 |
volatile TUint64 iLock; |
|
208 |
}; |
|
209 |
||
210 |
||
211 |
/** |
|
212 |
@publishedPartner |
|
213 |
@prototype |
|
214 |
*/ |
|
215 |
#define __SPIN_LOCK_IRQ_R(lock) ((void)NKern::DisableAllInterrupts()) |
|
216 |
||
217 |
/** |
|
218 |
@publishedPartner |
|
219 |
@prototype |
|
220 |
*/ |
|
221 |
#define __SPIN_UNLOCK_IRQ_R(lock) (NKern::EnableAllInterrupts()) |
|
222 |
||
223 |
/** |
|
224 |
@publishedPartner |
|
225 |
@prototype |
|
226 |
*/ |
|
227 |
#define __SPIN_FLASH_IRQ_R(lock) (NKern::EnableAllInterrupts(),(void)NKern::DisableAllInterrupts(),((TBool)TRUE)) |
|
228 |
||
229 |
/** |
|
230 |
@publishedPartner |
|
231 |
@prototype |
|
232 |
*/ |
|
233 |
#define __SPIN_LOCK_IRQ_W(lock) ((void)NKern::DisableAllInterrupts()) |
|
234 |
||
235 |
/** |
|
236 |
@publishedPartner |
|
237 |
@prototype |
|
238 |
*/ |
|
239 |
#define __SPIN_UNLOCK_IRQ_W(lock) (NKern::EnableAllInterrupts()) |
|
240 |
||
241 |
/** |
|
242 |
@publishedPartner |
|
243 |
@prototype |
|
244 |
*/ |
|
245 |
#define __SPIN_FLASH_IRQ_W(lock) (NKern::EnableAllInterrupts(),(void)NKern::DisableAllInterrupts(),((TBool)TRUE)) |
|
246 |
||
247 |
||
248 |
/** |
|
249 |
@publishedPartner |
|
250 |
@prototype |
|
251 |
*/ |
|
252 |
#define __SPIN_LOCK_R(lock) |
|
253 |
||
254 |
/** |
|
255 |
@publishedPartner |
|
256 |
@prototype |
|
257 |
*/ |
|
258 |
#define __SPIN_UNLOCK_R(lock) |
|
259 |
||
260 |
/** |
|
261 |
@internalComponent |
|
262 |
*/ |
|
263 |
#define __SPIN_FLASH_R(lock) ((TBool)FALSE) |
|
264 |
||
265 |
/** |
|
266 |
@publishedPartner |
|
267 |
@prototype |
|
268 |
*/ |
|
269 |
#define __SPIN_LOCK_W(lock) |
|
270 |
||
271 |
/** |
|
272 |
@publishedPartner |
|
273 |
@prototype |
|
274 |
*/ |
|
275 |
#define __SPIN_UNLOCK_W(lock) |
|
276 |
||
277 |
/** |
|
278 |
@internalComponent |
|
279 |
*/ |
|
280 |
#define __SPIN_FLASH_W(lock) ((TBool)FALSE) |
|
281 |
||
282 |
||
283 |
/** |
|
284 |
@publishedPartner |
|
285 |
@prototype |
|
286 |
*/ |
|
287 |
#define __SPIN_LOCK_IRQSAVE_R(lock) (NKern::DisableAllInterrupts()) |
|
288 |
||
289 |
/** |
|
290 |
@publishedPartner |
|
291 |
@prototype |
|
292 |
*/ |
|
293 |
#define __SPIN_UNLOCK_IRQRESTORE_R(lock,irq) (NKern::RestoreInterrupts(irq)) |
|
294 |
||
295 |
/** |
|
296 |
@publishedPartner |
|
297 |
@prototype |
|
298 |
*/ |
|
299 |
#define __SPIN_FLASH_IRQRESTORE_R(lock,irq) (NKern::RestoreInterrupts(irq),((void)NKern::DisableAllInterrupts()),((TBool)TRUE)) |
|
300 |
||
301 |
/** |
|
302 |
@publishedPartner |
|
303 |
@prototype |
|
304 |
*/ |
|
305 |
#define __SPIN_LOCK_IRQSAVE_W(lock) (NKern::DisableAllInterrupts()) |
|
306 |
||
307 |
/** |
|
308 |
@publishedPartner |
|
309 |
@prototype |
|
310 |
*/ |
|
311 |
#define __SPIN_UNLOCK_IRQRESTORE_W(lock,irq) (NKern::RestoreInterrupts(irq)) |
|
312 |
||
313 |
/** |
|
314 |
@publishedPartner |
|
315 |
@prototype |
|
316 |
*/ |
|
317 |
#define __SPIN_FLASH_IRQRESTORE_W(lock,irq) (NKern::RestoreInterrupts(irq),((void)NKern::DisableAllInterrupts()),((TBool)TRUE)) |
|
318 |
||
319 |
||
320 |
/** |
|
321 |
@publishedPartner |
|
322 |
@prototype |
|
323 |
*/ |
|
324 |
#define __SPIN_FLASH_PREEMPT_R(lock) ((TBool)NKern::PreemptionPoint()) |
|
325 |
||
326 |
/** |
|
327 |
@publishedPartner |
|
328 |
@prototype |
|
329 |
*/ |
|
330 |
#define __SPIN_FLASH_PREEMPT_W(lock) ((TBool)NKern::PreemptionPoint()) |
|
331 |
||
332 |
||
333 |
/** Nanokernel fast semaphore |
|
334 |
||
335 |
A light-weight semaphore class that only supports a single waiting thread, |
|
336 |
suitable for the Symbian OS thread I/O semaphore. |
|
337 |
||
338 |
Initialising a NFastSemaphore involves two steps: |
|
339 |
||
340 |
- Constructing the semaphore |
|
341 |
- Setting the semaphore owning thread (the one allowed to wait on it) |
|
342 |
||
343 |
For example, creating one for the current thread to wait on: |
|
344 |
||
345 |
@code |
|
346 |
NFastSemaphore sem; |
|
347 |
sem.iOwningThread = NKern::CurrentThread(); |
|
348 |
@endcode |
|
349 |
||
350 |
@publishedPartner |
|
351 |
@released |
|
352 |
*/ |
|
353 |
class NFastSemaphore |
|
354 |
{ |
|
355 |
public: |
|
356 |
inline NFastSemaphore(); |
|
357 |
inline NFastSemaphore(NThreadBase* aThread); |
|
358 |
IMPORT_C void SetOwner(NThreadBase* aThread); |
|
359 |
IMPORT_C void Wait(); |
|
360 |
IMPORT_C void Signal(); |
|
361 |
IMPORT_C void SignalN(TInt aCount); |
|
362 |
IMPORT_C void Reset(); |
|
363 |
void WaitCancel(); |
|
364 |
public: |
|
365 |
TInt iCount; /**< @internalComponent */ |
|
366 |
||
367 |
/** The thread allowed to wait on the semaphore |
|
368 |
@internalComponent |
|
369 |
*/ |
|
370 |
NThreadBase* iOwningThread; |
|
371 |
}; |
|
372 |
||
373 |
/** Create a fast semaphore |
|
374 |
||
375 |
@publishedPartner |
|
376 |
@released |
|
377 |
*/ |
|
378 |
inline NFastSemaphore::NFastSemaphore() |
|
379 |
: iCount(0), iOwningThread(NULL) |
|
380 |
{} |
|
381 |
||
382 |
/** Nanokernel fast mutex |
|
383 |
||
384 |
A light-weight priority-inheritance mutex that can be used if the following |
|
385 |
conditions apply: |
|
386 |
||
387 |
- Threads that hold the mutex never block. |
|
388 |
- The mutex is never acquired in a nested fashion |
|
389 |
||
390 |
If either of these conditions is not met, a DMutex object is more appropriate. |
|
391 |
||
392 |
@publishedPartner |
|
393 |
@released |
|
394 |
*/ |
|
395 |
class NFastMutex |
|
396 |
{ |
|
397 |
public: |
|
398 |
IMPORT_C NFastMutex(); |
|
399 |
IMPORT_C void Wait(); |
|
400 |
IMPORT_C void Signal(); |
|
401 |
IMPORT_C TBool HeldByCurrentThread(); /**< @internalComponent */ |
|
402 |
public: |
|
403 |
NThreadBase* iHoldingThread; /**< @internalComponent */ |
|
404 |
||
405 |
/** MUST ALWAYS BE 0 or 1 |
|
406 |
@internalComponent |
|
407 |
*/ |
|
408 |
TInt iWaiting; |
|
409 |
}; |
|
410 |
||
411 |
||
412 |
/** |
|
413 |
@publishedPartner |
|
414 |
@released |
|
415 |
||
416 |
The type of the callback function used by the nanokernel timer. |
|
417 |
||
418 |
@see NTimer |
|
419 |
*/ |
|
420 |
typedef void (*NTimerFn)(TAny*); |
|
421 |
||
422 |
||
423 |
||
424 |
||
425 |
/** |
|
426 |
@publishedPartner |
|
427 |
@released |
|
428 |
||
429 |
A basic relative timer provided by the nanokernel. |
|
430 |
||
431 |
It can generate either a one-shot interrupt or periodic interrupts. |
|
432 |
||
433 |
A timeout handler is called when the timer expires, either: |
|
434 |
- from the timer ISR - if the timer is queued via OneShot(TInt aTime) or OneShot(TInt aTime, TBool EFalse), or |
|
435 |
- from the nanokernel timer dfc1 thread - if the timer is queued via OneShot(TInt aTime, TBool ETrue) call, or |
|
436 |
- from any other dfc thread that provided DFC belongs to - if the timer is queued via OneShot(TInt aTime, TDfc& aDfc) call. |
|
437 |
Call-back mechanism cannot be changed in the life time of a timer. |
|
438 |
||
439 |
These timer objects may be manipulated from any context. |
|
440 |
The timers are driven from a periodic system tick interrupt, |
|
441 |
usually a 1ms period. |
|
442 |
||
443 |
@see NTimerFn |
|
444 |
*/ |
|
445 |
class NTimer : public SDblQueLink |
|
446 |
{ |
|
447 |
public: |
|
448 |
/** |
|
449 |
Default constructor. |
|
450 |
*/ |
|
451 |
inline NTimer() |
|
452 |
: iState(EIdle) |
|
453 |
{} |
|
454 |
/** |
|
455 |
Constructor taking a callback function and a pointer to be passed |
|
456 |
to the callback function. |
|
457 |
||
458 |
@param aFunction The callback function. |
|
459 |
@param aPtr A pointer to be passed to the callback function |
|
460 |
when called. |
|
461 |
*/ |
|
462 |
inline NTimer(NTimerFn aFunction, TAny* aPtr) |
|
463 |
: iPtr(aPtr), iFunction(aFunction), iState(EIdle) |
|
464 |
{} |
|
465 |
IMPORT_C TInt OneShot(TInt aTime); |
|
466 |
IMPORT_C TInt OneShot(TInt aTime, TBool aDfc); |
|
467 |
IMPORT_C TInt OneShot(TInt aTime, TDfc& aDfc); |
|
468 |
IMPORT_C TInt Again(TInt aTime); |
|
469 |
IMPORT_C TBool Cancel(); |
|
470 |
IMPORT_C TBool IsPending(); |
|
471 |
public: |
|
472 |
/** |
|
473 |
@internalComponent |
|
474 |
*/ |
|
475 |
enum TState |
|
476 |
{ |
|
477 |
EIdle=0, // not queued |
|
478 |
ETransferring=1, // being transferred from holding to ordered queue |
|
479 |
EHolding=2, // on holding queue |
|
480 |
EOrdered=3, // on ordered queue |
|
481 |
ECritical=4, // on ordered queue and in use by queue walk routine |
|
482 |
EFinal=5, // on final queue |
|
483 |
}; |
|
484 |
public: |
|
485 |
/** Argument for callback function or the pointer to TDfc */ |
|
486 |
TAny* iPtr; /**< @internalComponent */ |
|
487 |
||
488 |
/** Pointer to callback function. NULL value indicates that queuing of provided Dfc queue will be done |
|
489 |
instead of calling callback function on completion */ |
|
490 |
NTimerFn iFunction; /**< @internalComponent */ |
|
491 |
||
492 |
TUint32 iTriggerTime; /**< @internalComponent */ |
|
493 |
TUint8 iCompleteInDfc; /**< @internalComponent */ |
|
494 |
TUint8 iState; /**< @internalComponent */ |
|
495 |
TUint8 iPad1; /**< @internalComponent */ |
|
496 |
||
497 |
/** Available for timer client to use. |
|
498 |
@internalTechnology */ |
|
499 |
TUint8 iUserFlags; |
|
500 |
}; |
|
501 |
||
502 |
/** |
|
503 |
@internalTechnology |
|
504 |
*/ |
|
505 |
#define i_NTimer_iUserFlags iUserFlags |
|
506 |
||
507 |
/** |
|
508 |
@internalComponent |
|
509 |
*/ |
|
510 |
#define i_NTimer_iState iState |
|
511 |
||
512 |
/** |
|
513 |
@publishedPartner |
|
514 |
@released |
|
515 |
*/ |
|
516 |
typedef void (*NThreadFunction)(TAny*); |
|
517 |
||
518 |
/** |
|
519 |
@publishedPartner |
|
520 |
@released |
|
521 |
*/ |
|
522 |
typedef TDfc* (*NThreadExitHandler)(NThread*); |
|
523 |
||
524 |
/** |
|
525 |
@publishedPartner |
|
526 |
@released |
|
527 |
*/ |
|
528 |
typedef void (*NThreadStateHandler)(NThread*,TInt,TInt); |
|
529 |
||
530 |
/** |
|
531 |
@publishedPartner |
|
532 |
@released |
|
533 |
*/ |
|
534 |
typedef void (*NThreadExceptionHandler)(TAny*,NThread*); |
|
535 |
||
536 |
/** |
|
537 |
@publishedPartner |
|
538 |
@released |
|
539 |
*/ |
|
540 |
typedef void (*NThreadTimeoutHandler)(NThread*,TInt); |
|
541 |
||
542 |
/** |
|
543 |
@publishedPartner |
|
544 |
@released |
|
545 |
*/ |
|
546 |
struct SNThreadHandlers |
|
547 |
{ |
|
548 |
NThreadExitHandler iExitHandler; |
|
549 |
NThreadStateHandler iStateHandler; |
|
550 |
NThreadExceptionHandler iExceptionHandler; |
|
551 |
NThreadTimeoutHandler iTimeoutHandler; |
|
552 |
}; |
|
553 |
||
554 |
/** @internalComponent */ |
|
555 |
extern void NThread_Default_State_Handler(NThread*, TInt, TInt); |
|
556 |
||
557 |
/** @internalComponent */ |
|
558 |
extern void NThread_Default_Exception_Handler(TAny*, NThread*); |
|
559 |
||
560 |
/** @internalComponent */ |
|
561 |
#define NTHREAD_DEFAULT_EXIT_HANDLER ((NThreadExitHandler)0) |
|
562 |
||
563 |
/** @internalComponent */ |
|
564 |
#define NTHREAD_DEFAULT_STATE_HANDLER (&NThread_Default_State_Handler) |
|
565 |
||
566 |
/** @internalComponent */ |
|
567 |
#define NTHREAD_DEFAULT_EXCEPTION_HANDLER (&NThread_Default_Exception_Handler) |
|
568 |
||
569 |
/** @internalComponent */ |
|
570 |
#define NTHREAD_DEFAULT_TIMEOUT_HANDLER ((NThreadTimeoutHandler)0) |
|
571 |
||
572 |
||
573 |
/** |
|
574 |
@publishedPartner |
|
575 |
@released |
|
576 |
*/ |
|
577 |
struct SFastExecTable |
|
578 |
{ |
|
579 |
TInt iFastExecCount; // includes implicit function#0 |
|
580 |
TLinAddr iFunction[1]; // first entry is for call number 1 |
|
581 |
}; |
|
582 |
||
583 |
/** |
|
584 |
@publishedPartner |
|
585 |
@released |
|
586 |
*/ |
|
587 |
const TUint32 KExecFlagClaim=0x80000000; // claim system lock |
|
588 |
||
589 |
/** |
|
590 |
@publishedPartner |
|
591 |
@released |
|
592 |
*/ |
|
593 |
const TUint32 KExecFlagRelease=0x40000000; // release system lock |
|
594 |
||
595 |
/** |
|
596 |
@publishedPartner |
|
597 |
@released |
|
598 |
*/ |
|
599 |
const TUint32 KExecFlagPreprocess=0x20000000; // preprocess |
|
600 |
||
601 |
/** |
|
602 |
@publishedPartner |
|
603 |
@released |
|
604 |
*/ |
|
605 |
const TUint32 KExecFlagExtraArgMask=0x1C000000; // 3 bits indicating additional arguments |
|
606 |
||
607 |
/** |
|
608 |
@publishedPartner |
|
609 |
@released |
|
610 |
*/ |
|
611 |
const TUint32 KExecFlagExtraArgs2=0x04000000; // 2 additional arguments |
|
612 |
||
613 |
/** |
|
614 |
@publishedPartner |
|
615 |
@released |
|
616 |
*/ |
|
617 |
const TUint32 KExecFlagExtraArgs3=0x08000000; // 3 additional arguments |
|
618 |
||
619 |
/** |
|
620 |
@publishedPartner |
|
621 |
@released |
|
622 |
*/ |
|
623 |
const TUint32 KExecFlagExtraArgs4=0x0C000000; // 4 additional arguments |
|
624 |
||
625 |
/** |
|
626 |
@publishedPartner |
|
627 |
@released |
|
628 |
*/ |
|
629 |
const TUint32 KExecFlagExtraArgs5=0x10000000; // 5 additional arguments |
|
630 |
||
631 |
/** |
|
632 |
@publishedPartner |
|
633 |
@released |
|
634 |
*/ |
|
635 |
const TUint32 KExecFlagExtraArgs6=0x14000000; // 6 additional arguments |
|
636 |
||
637 |
/** |
|
638 |
@publishedPartner |
|
639 |
@released |
|
640 |
*/ |
|
641 |
const TUint32 KExecFlagExtraArgs7=0x18000000; // 7 additional arguments |
|
642 |
||
643 |
/** |
|
644 |
@publishedPartner |
|
645 |
@released |
|
646 |
*/ |
|
647 |
const TUint32 KExecFlagExtraArgs8=0x1C000000; // 8 additional arguments |
|
648 |
||
649 |
||
650 |
/** |
|
651 |
@publishedPartner |
|
652 |
@released |
|
653 |
*/ |
|
654 |
struct SSlowExecEntry |
|
655 |
{ |
|
656 |
TUint32 iFlags; // information about call |
|
657 |
TLinAddr iFunction; // address of function to be called |
|
658 |
}; |
|
659 |
||
660 |
||
661 |
/** |
|
662 |
@publishedPartner |
|
663 |
@released |
|
664 |
*/ |
|
665 |
struct SSlowExecTable |
|
666 |
{ |
|
667 |
TInt iSlowExecCount; |
|
668 |
TLinAddr iInvalidExecHandler; // used if call number invalid |
|
669 |
TLinAddr iPreprocessHandler; // used for handle lookups |
|
670 |
SSlowExecEntry iEntries[1]; // first entry is for call number 0 |
|
671 |
}; |
|
672 |
||
673 |
// Thread iAttributes Constants |
|
674 |
const TUint8 KThreadAttImplicitSystemLock=1; /**< @internalComponent */ |
|
675 |
const TUint8 KThreadAttAddressSpace=2; /**< @internalComponent */ |
|
676 |
const TUint8 KThreadAttLoggable=4; /**< @internalComponent */ |
|
677 |
const TUint8 KThreadAttDelayed=8; /**< @internalComponent */ |
|
678 |
||
679 |
||
680 |
// Thread CPU |
|
681 |
const TUint32 KCpuAffinityAny=0xffffffffu; /**< @internalComponent */ |
|
682 |
||
683 |
/** Information needed for creating a nanothread. |
|
684 |
||
685 |
@publishedPartner |
|
686 |
@released |
|
687 |
*/ |
|
688 |
struct SNThreadCreateInfo |
|
689 |
{ |
|
690 |
NThreadFunction iFunction; |
|
691 |
TAny* iStackBase; |
|
692 |
TInt iStackSize; |
|
693 |
TInt iPriority; |
|
694 |
TInt iTimeslice; |
|
695 |
TUint8 iAttributes; |
|
696 |
TUint32 iCpuAffinity; |
|
697 |
const SNThreadHandlers* iHandlers; |
|
698 |
const SFastExecTable* iFastExecTable; |
|
699 |
const SSlowExecTable* iSlowExecTable; |
|
700 |
const TUint32* iParameterBlock; |
|
701 |
TInt iParameterBlockSize; // if zero, iParameterBlock _is_ the initial data |
|
702 |
// otherwise it points to n bytes of initial data |
|
703 |
}; |
|
704 |
||
705 |
/** Constant for use with NKern:: functions which release a fast mutex as well |
|
706 |
as performing some other operations. |
|
707 |
||
708 |
@publishedPartner |
|
709 |
@released |
|
710 |
*/ |
|
711 |
#define SYSTEM_LOCK (NFastMutex*)0 |
|
712 |
||
713 |
||
714 |
/** Idle handler function |
|
715 |
Pointer to a function which is called whenever a CPU goes idle |
|
716 |
||
717 |
@param aPtr The iPtr stored in the SCpuIdleHandler structure |
|
43
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
718 |
@param aStage Bits 0-7 give a bitmask of CPUs now active, i.e. 0 means all processors now idle |
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
719 |
Bit 31 set indicates that the current core can now be powered down |
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
720 |
Bit 30 set indicates that other cores still remain to be retired |
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
721 |
Bit 29 set indicates that postamble processing is required after waking up |
0 | 722 |
|
723 |
@internalComponent |
|
724 |
*/ |
|
43
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
725 |
typedef void (*TCpuIdleHandlerFn)(TAny* aPtr, TUint32 aStage); |
0 | 726 |
|
727 |
/** Idle handler structure |
|
728 |
||
729 |
@internalComponent |
|
730 |
*/ |
|
731 |
struct SCpuIdleHandler |
|
732 |
{ |
|
43
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
733 |
/** |
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
734 |
Defined flag bits in aStage parameter |
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
735 |
*/ |
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
736 |
enum |
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
737 |
{ |
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
738 |
EActiveCpuMask=0xFFu, |
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
739 |
EPostamble=1u<<29, // postamble needed |
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
740 |
EMore=1u<<30, // more cores still to be retired |
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
741 |
ERetire=1u<<31, // this core can now be retired |
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
742 |
}; |
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
743 |
|
0 | 744 |
TCpuIdleHandlerFn iHandler; |
745 |
TAny* iPtr; |
|
746 |
volatile TBool iPostambleRequired; |
|
747 |
}; |
|
748 |
||
749 |
||
750 |
/** |
|
751 |
@internalComponent |
|
752 |
*/ |
|
753 |
enum TUserModeCallbackReason |
|
754 |
{ |
|
755 |
EUserModeCallbackRun, |
|
756 |
EUserModeCallbackCancel, |
|
757 |
}; |
|
758 |
||
759 |
||
760 |
/** |
|
761 |
A callback function executed when a thread returns to user mode. |
|
762 |
||
763 |
@internalComponent |
|
764 |
*/ |
|
765 |
typedef void (*TUserModeCallbackFunc)(TAny* aThisPtr, TUserModeCallbackReason aReasonCode); |
|
766 |
||
767 |
||
768 |
/** |
|
769 |
An object representing a queued callback to be executed when a thread returns to user mode. |
|
770 |
||
771 |
@internalComponent |
|
772 |
*/ |
|
773 |
class TUserModeCallback |
|
774 |
{ |
|
775 |
public: |
|
776 |
TUserModeCallback(TUserModeCallbackFunc); |
|
777 |
~TUserModeCallback(); |
|
778 |
||
779 |
public: |
|
780 |
TUserModeCallback* volatile iNext; |
|
781 |
TUserModeCallbackFunc iFunc; |
|
782 |
}; |
|
783 |
||
784 |
TUserModeCallback* const KUserModeCallbackUnqueued = ((TUserModeCallback*)1); |
|
785 |
||
786 |
||
787 |
/** Nanokernel functions |
|
788 |
||
789 |
@publishedPartner |
|
790 |
@released |
|
791 |
*/ |
|
792 |
class NKern |
|
793 |
{ |
|
794 |
public: |
|
795 |
/** Bitmask values used when blocking a nanothread. |
|
796 |
@see NKern::Block() |
|
797 |
*/ |
|
798 |
enum TBlockMode |
|
799 |
{ |
|
800 |
EEnterCS=1, /**< Enter thread critical section before blocking */ |
|
801 |
ERelease=2, /**< Release specified fast mutex before blocking */ |
|
802 |
EClaim=4, /**< Re-acquire specified fast mutex when unblocked */ |
|
803 |
EObstruct=8, /**< Signifies obstruction of thread rather than lack of work to do */ |
|
804 |
}; |
|
805 |
||
806 |
/** Values that specify the context of the processor. |
|
807 |
@see NKern::CurrentContext() |
|
808 |
*/ |
|
809 |
enum TContext |
|
810 |
{ |
|
811 |
EThread=0, /**< The processor is in a thread context*/ |
|
812 |
EIDFC=1, /**< The processor is in an IDFC context*/ |
|
813 |
EInterrupt=2, /**< The processor is in an interrupt context*/ |
|
814 |
EEscaped=KMaxTInt /**< Not valid a process context on target hardware*/ |
|
815 |
}; |
|
816 |
||
817 |
public: |
|
818 |
// Threads |
|
819 |
IMPORT_C static TInt ThreadCreate(NThread* aThread, SNThreadCreateInfo& anInfo); |
|
820 |
IMPORT_C static TBool ThreadSuspend(NThread* aThread, TInt aCount); |
|
821 |
IMPORT_C static TBool ThreadResume(NThread* aThread); |
|
822 |
IMPORT_C static TBool ThreadResume(NThread* aThread, NFastMutex* aMutex); |
|
823 |
IMPORT_C static TBool ThreadForceResume(NThread* aThread); |
|
824 |
IMPORT_C static TBool ThreadForceResume(NThread* aThread, NFastMutex* aMutex); |
|
825 |
IMPORT_C static void ThreadRelease(NThread* aThread, TInt aReturnValue); |
|
826 |
IMPORT_C static void ThreadRelease(NThread* aThread, TInt aReturnValue, NFastMutex* aMutex); |
|
827 |
IMPORT_C static void ThreadSetPriority(NThread* aThread, TInt aPriority); |
|
828 |
IMPORT_C static void ThreadSetPriority(NThread* aThread, TInt aPriority, NFastMutex* aMutex); |
|
43
c1f20ce4abcf
Revision: 201035
Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com>
parents:
0
diff
changeset
|
829 |
static void ThreadSetNominalPriority(NThread* aThread, TInt aPriority); |
0 | 830 |
IMPORT_C static void ThreadRequestSignal(NThread* aThread); |
831 |
IMPORT_C static void ThreadRequestSignal(NThread* aThread, NFastMutex* aMutex); |
|
832 |
IMPORT_C static void ThreadRequestSignal(NThread* aThread, TInt aCount); |
|
833 |
IMPORT_C static void ThreadKill(NThread* aThread); |
|
834 |
IMPORT_C static void ThreadKill(NThread* aThread, NFastMutex* aMutex); |
|
835 |
IMPORT_C static void ThreadEnterCS(); |
|
836 |
IMPORT_C static void ThreadLeaveCS(); |
|
837 |
static NThread* _ThreadEnterCS(); /**< @internalComponent */ |
|
838 |
static void _ThreadLeaveCS(); /**< @internalComponent */ |
|
839 |
IMPORT_C static TInt Block(TUint32 aTimeout, TUint aMode, NFastMutex* aMutex); |
|
840 |
IMPORT_C static TInt Block(TUint32 aTimeout, TUint aMode); |
|
841 |
IMPORT_C static void NanoBlock(TUint32 aTimeout, TUint aState, TAny* aWaitObj); |
|
842 |
IMPORT_C static void ThreadGetUserContext(NThread* aThread, TAny* aContext, TUint32& aAvailRegistersMask); |
|
843 |
IMPORT_C static void ThreadSetUserContext(NThread* aThread, TAny* aContext); |
|
844 |
IMPORT_C static void ThreadGetSystemContext(NThread* aThread, TAny* aContext, TUint32& aAvailRegistersMask); |
|
845 |
static void ThreadModifyUsp(NThread* aThread, TLinAddr aUsp); |
|
846 |
IMPORT_C static TInt FreezeCpu(); /**< @internalComponent */ |
|
847 |
IMPORT_C static void EndFreezeCpu(TInt aCookie); /**< @internalComponent */ |
|
848 |
IMPORT_C static TUint32 ThreadSetCpuAffinity(NThread* aThread, TUint32 aAffinity); /**< @internalComponent */ |
|
849 |
IMPORT_C static void ThreadSetTimeslice(NThread* aThread, TInt aTimeslice); /**< @internalComponent */ |
|
850 |
IMPORT_C static TUint64 ThreadCpuTime(NThread* aThread); /**< @internalComponent */ |
|
851 |
IMPORT_C static TUint32 CpuTimeMeasFreq(); /**< @internalComponent */ |
|
852 |
static TInt QueueUserModeCallback(NThreadBase* aThread, TUserModeCallback* aCallback); /**< @internalComponent */ |
|
853 |
static void MoveUserModeCallbacks(NThreadBase* aSrcThread, NThreadBase* aDestThread); /**< @internalComponent */ |
|
854 |
static void CancelUserModeCallbacks(); /**< @internalComponent */ |
|
855 |
||
856 |
// Fast semaphores |
|
857 |
IMPORT_C static void FSSetOwner(NFastSemaphore* aSem,NThreadBase* aThread); |
|
858 |
IMPORT_C static void FSWait(NFastSemaphore* aSem); |
|
859 |
IMPORT_C static void FSSignal(NFastSemaphore* aSem); |
|
860 |
IMPORT_C static void FSSignal(NFastSemaphore* aSem, NFastMutex* aMutex); |
|
861 |
IMPORT_C static void FSSignalN(NFastSemaphore* aSem, TInt aCount); |
|
862 |
IMPORT_C static void FSSignalN(NFastSemaphore* aSem, TInt aCount, NFastMutex* aMutex); |
|
863 |
||
864 |
// Fast mutexes |
|
865 |
IMPORT_C static void FMWait(NFastMutex* aMutex); |
|
866 |
IMPORT_C static void FMSignal(NFastMutex* aMutex); |
|
867 |
IMPORT_C static TBool FMFlash(NFastMutex* aMutex); |
|
868 |
||
869 |
// --- Scheduler (preemption) lock ------------------------------------------

// Lock the kernel, disabling rescheduling of the current CPU.
IMPORT_C static void Lock();
// As Lock(), but also returns the current NThread.
IMPORT_C static NThread* LockC();
// Unlock the kernel, re-enabling rescheduling (and performing any
// deferred reschedule).
IMPORT_C static void Unlock();
// Briefly enable preemption if a reschedule is pending while the kernel
// lock is held; return value presumably reports whether one occurred.
IMPORT_C static TInt PreemptionPoint();
|
874 |
||
875 |
// --- Interrupt control ----------------------------------------------------
// Disable* return an opaque restore value which must be passed to
// RestoreInterrupts() to re-establish the previous state (supports nesting).

// Disable all maskable interrupts; returns the previous state.
IMPORT_C static TInt DisableAllInterrupts();
// Disable interrupts up to hardware priority aLevel; returns previous state.
IMPORT_C static TInt DisableInterrupts(TInt aLevel);
// Restore the interrupt state captured by a Disable*Interrupts() call.
IMPORT_C static void RestoreInterrupts(TInt aRestoreData);
// Unconditionally enable all interrupts.
IMPORT_C static void EnableAllInterrupts();
|
880 |
||
881 |
// --- Read-modify-write primitives -----------------------------------------
// Thin inline wrappers over the e32 atomics API (__e32_atomic_*, fully
// ordered variants).  Per that API, the add/axo/swp families return the
// ORIGINAL value of the target and the cas family returns TRUE iff the
// compare succeeded and the new value was stored.

// Atomically increment aCount; returns the original value.
inline static TInt LockedInc(TInt& aCount)
	{ return __e32_atomic_add_ord32(&aCount,1); }
// Atomically decrement aCount (adding 0xffffffff == subtracting 1 mod 2^32).
inline static TInt LockedDec(TInt& aCount)
	{ return __e32_atomic_add_ord32(&aCount,0xffffffff); }
// Atomically add aSrc to aDest; returns the original value.
inline static TInt LockedAdd(TInt& aDest, TInt aSrc)
	{ return __e32_atomic_add_ord32(&aDest,aSrc); }
// 64-bit counterparts of the above.
inline static TInt64 LockedInc(TInt64& aCount)
	{ return __e32_atomic_add_ord64(&aCount,1); }
inline static TInt64 LockedDec(TInt64& aCount)
	{ return __e32_atomic_add_ord64(&aCount,TUint64(TInt64(-1))); }
inline static TInt64 LockedAdd(TInt64& aDest, TInt64 aSrc)	/**< @internalComponent */
	{ return __e32_atomic_add_ord64(&aDest,aSrc); }
// Atomically clear aClearMask bits then set aSetMask bits in aDest, via
// and-xor: (aDest & ~(C|S)) ^ S.  Returns the original value.
inline static TUint32 LockedSetClear(TUint32& aDest, TUint32 aClearMask, TUint32 aSetMask)
	{ return __e32_atomic_axo_ord32(&aDest,~(aClearMask|aSetMask),aSetMask); }
inline static TUint16 LockedSetClear16(TUint16& aDest, TUint16 aClearMask, TUint16 aSetMask)	/**< @internalComponent */
	{ return __e32_atomic_axo_ord16(&aDest,TUint16(~(aClearMask|aSetMask)),aSetMask); }
inline static TUint8 LockedSetClear8(TUint8& aDest, TUint8 aClearMask, TUint8 aSetMask)
	{ return __e32_atomic_axo_ord8(&aDest,TUint8(~(aClearMask|aSetMask)),aSetMask); }
// Conditional add via tas (threshold-and-set): if aCount>=threshold add
// the first delta, else add the second; returns the original value.
// SafeInc thus increments only when aCount>=1 (a count that has hit 0
// stays at 0); SafeDec decrements only when aCount>=1.
inline static TInt SafeInc(TInt& aCount)
	{ return __e32_atomic_tas_ord32(&aCount,1,1,0); }
inline static TInt SafeDec(TInt& aCount)
	{ return __e32_atomic_tas_ord32(&aCount,1,-1,0); }
// Add aInc only when aCount>=aLimit (AddIfGe) / aCount<aLimit (AddIfLt).
inline static TInt AddIfGe(TInt& aCount, TInt aLimit, TInt aInc)	/**< @internalComponent */
	{ return __e32_atomic_tas_ord32(&aCount,aLimit,aInc,0); }
inline static TInt AddIfLt(TInt& aCount, TInt aLimit, TInt aInc)	/**< @internalComponent */
	{ return __e32_atomic_tas_ord32(&aCount,aLimit,0,aInc); }
// Atomic exchange: store aNewValue, return the previous contents.
// (NB the second parameter is named aPtr but is the target location.)
inline static TAny* SafeSwap(TAny* aNewValue, TAny*& aPtr)
	{ return __e32_atomic_swp_ord_ptr(&aPtr, aNewValue); }
inline static TUint8 SafeSwap8(TUint8 aNewValue, TUint8& aPtr)
	{ return __e32_atomic_swp_ord8(&aPtr, aNewValue); }
inline static TUint16 SafeSwap16(TUint16 aNewValue, TUint16& aPtr)	/**< @internalComponent */
	{ return __e32_atomic_swp_ord16(&aPtr, aNewValue); }
// Compare-and-swap: if aPtr==aExpected store aNew and return TRUE, else
// FALSE.  aExpected is taken by value here, so the conflicting value
// observed on failure is NOT reported back to the caller.
inline static TBool CompareAndSwap(TAny*& aPtr, TAny* aExpected, TAny* aNew)	/**< @internalComponent */
	{ return __e32_atomic_cas_ord_ptr(&aPtr, &aExpected, aNew); }
inline static TBool CompareAndSwap8(TUint8& aPtr, TUint8 aExpected, TUint8 aNew)	/**< @internalComponent */
	{ return __e32_atomic_cas_ord8(&aPtr, (TUint8*)&aExpected, (TUint8)aNew); }
inline static TBool CompareAndSwap16(TUint16& aPtr, TUint16 aExpected, TUint16 aNew)	/**< @internalComponent */
	{ return __e32_atomic_cas_ord16(&aPtr, (TUint16*)&aExpected, (TUint16)aNew); }
// 32-bit exchange overloads for TUint32/TUint/TInt.
inline static TUint32 SafeSwap(TUint32 aNewValue, TUint32& aPtr)	/**< @internalComponent */
	{ return __e32_atomic_swp_ord32(&aPtr, aNewValue); }
inline static TUint SafeSwap(TUint aNewValue, TUint& aPtr)	/**< @internalComponent */
	{ return __e32_atomic_swp_ord32(&aPtr, aNewValue); }
inline static TInt SafeSwap(TInt aNewValue, TInt& aPtr)	/**< @internalComponent */
	{ return __e32_atomic_swp_ord32(&aPtr, aNewValue); }
// 32-bit compare-and-swap overloads for TUint32/TUint/TInt.
inline static TBool CompareAndSwap(TUint32& aPtr, TUint32 aExpected, TUint32 aNew)	/**< @internalComponent */
	{ return __e32_atomic_cas_ord32(&aPtr, &aExpected, aNew); }
inline static TBool CompareAndSwap(TUint& aPtr, TUint aExpected, TUint aNew)	/**< @internalComponent */
	{ return __e32_atomic_cas_ord32(&aPtr, (TUint32*)&aExpected, (TUint32)aNew); }
inline static TBool CompareAndSwap(TInt& aPtr, TInt aExpected, TInt aNew)	/**< @internalComponent */
	{ return __e32_atomic_cas_ord32(&aPtr, (TUint32*)&aExpected, (TUint32)aNew); }
|
932 |
||
933 |
||
934 |
// --- Miscellaneous --------------------------------------------------------
// Declarations only; comments inferred from signatures where noted.

// The currently executing nanokernel thread.
IMPORT_C static NThread* CurrentThread();
// Index of the CPU this code is running on.
IMPORT_C static TInt CurrentCpu();	/**< @internalComponent */
// Number of CPUs in the system.
IMPORT_C static TInt NumberOfCpus();	/**< @internalComponent */
// Acquire / release the system lock (the kernel-wide fast mutex).
IMPORT_C static void LockSystem();
IMPORT_C static void UnlockSystem();
// Briefly release the system lock if contended; TBool presumably reports
// whether a release actually occurred -- confirm against implementation.
IMPORT_C static TBool FlashSystem();
// Block the current thread until its request semaphore is signalled.
IMPORT_C static void WaitForAnyRequest();
// Suspend the current thread for aTime (nanokernel ticks; see TickPeriod()).
IMPORT_C static void Sleep(TUint32 aTime);
// Terminate the current thread; DeferredExit defers the termination --
// exact trigger point not visible here.
IMPORT_C static void Exit();
IMPORT_C static void DeferredExit();
// Give up the remainder of the current timeslice.
IMPORT_C static void YieldTimeslice();	/**< @internalComponent */
// Round-robin the ready list at priority aPriority (optionally on aCpu).
IMPORT_C static void RotateReadyList(TInt aPriority);
IMPORT_C static void RotateReadyList(TInt aPriority, TInt aCpu);	/**< @internalTechnology */
// Latency instrumentation hooks.
IMPORT_C static void RecordIntLatency(TInt aLatency, TInt aIntMask);	/**< @internalTechnology */
IMPORT_C static void RecordThreadLatency(TInt aLatency);	/**< @internalTechnology */
// Nanokernel tick counter / tick period (microseconds per tick, presumably).
IMPORT_C static TUint32 TickCount();
IMPORT_C static TInt TickPeriod();
// Convert milliseconds to nanokernel timer ticks.
IMPORT_C static TInt TimerTicks(TInt aMilliseconds);
// Convert microseconds to timeslice ticks.
IMPORT_C static TInt TimesliceTicks(TUint32 aMicroseconds);	/**< @internalTechnology */
// Current execution context (thread/IDFC/interrupt -- enum defined elsewhere).
IMPORT_C static TInt CurrentContext();
// Free-running fast counter and its frequency.
IMPORT_C static TUint32 FastCounter();
IMPORT_C static TInt FastCounterFrequency();
// Nanokernel initialisation: Init0 with variant data, then Init for the
// first thread.  Kernel-internal, called during boot.
static void Init0(TAny* aVariantData);
static void Init(NThread* aThread, SNThreadCreateInfo& anInfo);
// TRUE if the kernel lock is held (aCount!=0 additionally checks the
// exact lock count, presumably -- confirm against implementation).
IMPORT_C static TBool KernelLocked(TInt aCount=0);	/**< @internalTechnology */
// The fast mutex held by the current thread, or NULL.
IMPORT_C static NFastMutex* HeldFastMutex();	/**< @internalTechnology */
// Idle handling and crash notification (kernel-internal).
static void Idle();
IMPORT_C static SCpuIdleHandler* CpuIdleHandler();	/**< @internalTechnology */
static void NotifyCrash(const TAny* a0, TInt a1);	/**< @internalTechnology */
// TRUE once the system has crashed (after NotifyCrash, presumably).
IMPORT_C static TBool Crashed();
// Counter incremented around idle transitions -- exact semantics not
// visible here; see implementation.
static TUint32 IdleGenerationCount();
|
966 |
||
967 |
// --- Debugger support -----------------------------------------------------

// Callback invoked on reschedule, receiving the thread being switched to
// (exact invocation point is in the scheduler implementation).
typedef void (*TRescheduleCallback)(NThread*);
// Report the address range [aStart,aEnd) of the scheduler hook patch site.
IMPORT_C static void SchedulerHooks(TLinAddr& aStart, TLinAddr& aEnd);
// Install / remove the scheduler hooks at that site.
IMPORT_C static void InsertSchedulerHooks();
IMPORT_C static void RemoveSchedulerHooks();
// Register the callback the installed hooks will invoke.
IMPORT_C static void SetRescheduleCallback(TRescheduleCallback aCallback);
|
973 |
}; |
|
974 |
||
975 |
||
976 |
/** Create a fast semaphore

	Initialises the semaphore count to zero (no outstanding signals) and
	records the single thread permitted to wait on it.

	@param aThread The owning thread (the only thread allowed to wait on
	               this semaphore), or NULL to use the current thread.

	@publishedPartner
	@released
*/
inline NFastSemaphore::NFastSemaphore(NThreadBase* aThread)
	:	iCount(0),
		iOwningThread(aThread ? aThread : (NThreadBase*)NKern::CurrentThread())
	{
	}
|
986 |
||
987 |
||
988 |
#endif |