// Copyright (c) 2007-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32\nkernsmp\x86\ncirq.cia
//
//

/**
 @file
 @internalTechnology
*/

#include "nk_priv.h"
#include "nk_plat.h"
#include <nk_irq.h>
#include <apic.h>

#define OFFSET_NIrqHandler_iHState	8

__ASSERT_COMPILE(_FOFF(NIrqHandler, iHState) == OFFSET_NIrqHandler_iHState);

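// Layout of the IRQ state words, as implied by the masks and shifts used in
// this file (an inference for readability, not a definitive copy of nk_irq.h):
//
//	NIrq::iIState         bits 16-31 run count, bits 8-15 CPU number,
//	                      bit 0 EWait, bits 1-2 ERaw/ECount
//	NIrqHandler::iHState  bits 16-31 run count, bit 8 EDisable, bit 9 EUnbind,
//	                      bit 10 EBind, bit 11 ENotReady, bit 12 ECount,
//	                      bit 13 EActive
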
//
// Atomically increment the run count, provided ECount or ERaw is set or the
// count is <2. If the count was originally zero, atomically set the CPU field.
// Wait for EWait to be clear.
// Return the state of iIState immediately before the increment.
//
__NAKED__ TUint32 NIrq::EnterIsr()
	{
	THISCALL_PROLOG0()
	asm("push ebx ");
	asm("xor ebx, ebx ");
	asm("str bx ");						// BX = this CPU's TSS selector
	asm("sub bl, 0x28 ");				// TSS selectors start at 0x28, 8 bytes apart ...
	asm("shr bl, 3 ");					// ... so BL = CPU number
	asm("mov eax, [ecx+%0]" : : "i" _FOFF(NIrq,iIState));
	asm("enterisr_loop: ");
	asm("mov edx, eax ");
	asm("cmp edx, 0x10000 ");			// compare run count to 1
	asm("jae short enterisr_not0 ");	// skip if >=1
	asm("mov dh, bl ");					// else update CPU
	asm("enterisr_not0: ");
	asm("add edx, 0x10000 ");			// increment run count
	asm("cmp edx, 0x20000 ");			// compare to 2
	asm("jb short enterisr_lt2 ");
	asm("test dl, 6 ");					// ECount|ERaw
	asm("jz short enterisr_wait ");		// if !ECount && !ERaw limit count to 2
	asm("enterisr_lt2: ");
	asm("lock cmpxchg [ecx+%0], edx" : : "i" _FOFF(NIrq,iIState));
	asm("jne short enterisr_loop ");

	asm("enterisr_wait: ");
	asm("mov edx, 1 ");					// EWait
	asm("enterisr_loop1: ");
	asm("test edx, [ecx+%0]" : : "i" _FOFF(NIrq,iIState));
	asm("jnz short enterisr_loop2 ");	// loop while EWait set
	asm("pop ebx ");
	asm("lock add dword ptr [esp], 0 ");	// locked op = full memory barrier
	THISCALL_EPILOG0()

	asm("enterisr_loop2: ");
	X86_PAUSE
	asm("jmp short enterisr_loop1 ");	// retest EWait
	}

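// Illustrative C++ equivalent of EnterIsr() (a sketch only: cas() is a
// hypothetical compare-and-swap which, like lock cmpxchg, refreshes its
// second argument with the current value on failure; the constants mirror
// the masks used above):
//
//	TUint32 EnterIsrSketch(volatile TUint32& aIState, TUint32 aCpu)
//		{
//		TUint32 orig = aIState;
//		for (;;)
//			{
//			TUint32 next = orig;
//			if (orig < 0x10000u)
//				next = (next & ~0xFF00u) | (aCpu << 8);	// count was 0: claim the CPU field
//			next += 0x10000u;							// increment the run count
//			if (next >= 0x20000u && !(orig & 0x06u))	// !ECount && !ERaw:
//				break;									// cap the count at 2, store nothing
//			if (cas(aIState, orig, next))				// orig refreshed on failure
//				break;
//			}
//		while (aIState & 0x01u)							// spin until EWait clear
//			{ /* pause */ }
//		return orig;									// state seen before the increment
//		}
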
//
// Atomically decrement the run count.
// Return TRUE if the run count is nonzero after the decrement.
//
__NAKED__ TBool NIrq::IsrDone()
	{
	THISCALL_PROLOG0()
	asm("mov eax, 0xffff0000 ");	// -1<<run count shift
	asm("lock xadd [ecx+%0], eax" : : "i" _FOFF(NIrq,iIState));	// eax = original iIState
	asm("shr eax, 16 ");			// eax = original run count
	asm("dec eax ");				// eax = new run count = TRUE if nonzero
	THISCALL_EPILOG0()
	}

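// Illustrative C++ equivalent (a sketch; fetch_add() is a hypothetical
// stand-in for the lock xadd above, returning the value before the add):
//
//	TBool IsrDoneSketch(volatile TUint32& aIState)
//		{
//		TUint32 orig = fetch_add(aIState, 0xFFFF0000u);	// add -1<<16: decrement run count
//		return (orig >> 16) - 1;						// new run count; nonzero = TRUE
//		}
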
//
// Wait (allowing interrupts and preemption) until run count = 0 and EWait clear.
// Then atomically set EWait and return with interrupts disabled.
//
__NAKED__ void NIrq::Wait()
	{
	THISCALL_PROLOG0()
	asm("wait_loop: ");
	asm("cli ");
	asm("mov eax, [ecx+%0]" : : "i" _FOFF(NIrq,iIState));
	asm("wait_loop1: ");
	asm("mov edx, eax ");
	asm("test edx, 0xffff0001 ");	// test run count and EWait
	asm("jnz short wait_loop2 ");	// if not both zero, must wait
	asm("inc edx ");				// else try to set EWait
	asm("lock cmpxchg [ecx+%0], edx" : : "i" _FOFF(NIrq,iIState));
	asm("jne short wait_loop1 ");	// someone beat us to it
	THISCALL_EPILOG0()				// success - return with interrupts disabled

	// spin, allowing interrupts, while we wait for run count and EWait both zero
	asm("wait_loop2: ");
	asm("sti ");
	X86_PAUSE
	asm("jmp short wait_loop ");
	}

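// Illustrative C++ equivalent (a sketch; cas() as above, and irq_disable()/
// irq_enable() are hypothetical stand-ins for cli/sti):
//
//	void WaitSketch(volatile TUint32& aIState)
//		{
//		for (;;)
//			{
//			irq_disable();							// cli
//			TUint32 orig = aIState;
//			while (!(orig & 0xFFFF0001u))			// run count 0 and EWait clear?
//				{
//				if (cas(aIState, orig, orig | 1u))	// try to claim EWait ...
//					return;							// ... success: interrupts left disabled
//				}									// cas failure refreshed orig; retest
//			irq_enable();							// sti: let pending interrupts run
//			/* pause */
//			}
//		}
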
//
// Atomically clear EWait and reenable interrupts.
//
__NAKED__ void NIrq::Done()
	{
	THISCALL_PROLOG0()
	asm("lock and dword ptr [ecx+%0], 0xfffffffe" : : "i" _FOFF(NIrq,iIState));	// clear EWait
	asm("sti ");
	THISCALL_EPILOG0()
	}


//
// atomic { if !EUnbind && !ENotReady clear EDisable and EBind }
// Return the initial value of iHState.
//
__NAKED__ TUint32 NIrqHandler::DoSetEnabled()
	{
	THISCALL_PROLOG0()
	asm("mov eax, [ecx+%0]" : : "i" _FOFF(NIrqHandler,iHState));
	asm("dse_loop: ");
	asm("mov edx, eax ");
	asm("test dh, 0x0A ");		// EUnbind|ENotReady
	asm("jnz short dse_end ");	// if either set, finished
	asm("and dh, 0xFA ");		// else try to clear EDisable and EBind
	asm("dse_end: ");
	asm("lock cmpxchg [ecx+%0], edx" : : "i" _FOFF(NIrqHandler,iHState));
	asm("jne short dse_loop ");	// someone beat us to it
	THISCALL_EPILOG0()			// success - return original iHState
	}

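// Illustrative C++ equivalent (a sketch; cas() as above, bit values inferred
// from the masks in this function):
//
//	TUint32 DoSetEnabledSketch(volatile TUint32& aHState)
//		{
//		TUint32 orig = aHState;
//		for (;;)
//			{
//			TUint32 next = orig;
//			if (!(orig & 0x0A00u))			// !EUnbind && !ENotReady
//				next &= ~0x0500u;			// clear EDisable and EBind
//			if (cas(aHState, orig, next))	// orig refreshed on failure
//				return orig;
//			}
//		}
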
//
// Atomically increment the run count by aCount if ECount is set or the run
// count is initially zero.
// If !EDisable and !EUnbind, set EActive.
// Return the initial iHState.
//
__NAKED__ TUint32 NIrqHandler::DoActivate(TInt aCount)
	{
	THISCALL_PROLOG1()
	asm("mov eax, [ecx+%0]" : : "i" _FOFF(NIrqHandler,iHState));
	asm("da_loop: ");
	asm("mov edx, eax ");
	asm("cmp edx, 0x10000 ");
	asm("jb short da_zero ");	// skip ECount check if run count initially zero
	asm("test dh, 0x10 ");		// else check ECount
	asm("jz short da_end ");	// if clear, don't increment
	asm("da_zero: ");
	asm("mov edx, [esp+4] ");	// edx = aCount
	asm("shl edx, 16 ");
	asm("add edx, eax ");		// increment run count
	asm("da_end: ");
	asm("test dh, 0x03 ");		// EUnbind|EDisable
	asm("jnz short da_1 ");		// skip if EUnbind or EDisable set
	asm("or dh, 0x20 ");		// set EActive
	asm("da_1: ");
	asm("lock cmpxchg [ecx+%0], edx" : : "i" _FOFF(NIrqHandler,iHState));
	asm("jne short da_loop ");	// someone beat us to it
	THISCALL_EPILOG1()			// success - return original iHState
	}

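// Illustrative C++ equivalent (a sketch; cas() as above):
//
//	TUint32 DoActivateSketch(volatile TUint32& aHState, TInt aCount)
//		{
//		TUint32 orig = aHState;
//		for (;;)
//			{
//			TUint32 next = orig;
//			if (orig < 0x10000u || (orig & 0x1000u))	// run count 0, or ECount set
//				next += TUint32(aCount) << 16;			// add aCount to the run count
//			if (!(orig & 0x0300u))						// !EUnbind && !EDisable
//				next |= 0x2000u;						// set EActive
//			if (cas(aHState, orig, next))				// orig refreshed on failure
//				return orig;
//			}
//		}
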
//
// Atomically decrement the run count.
// Return the initial iHState.
//
__NAKED__ TUint32 NIrqHandler::EventBegin()
	{
	THISCALL_PROLOG0()
	asm("mov eax, 0xffff0000 ");	// -1<<run count shift
	asm("lock xadd [ecx+%0], eax" : : "i" _FOFF(NIrqHandler,iHState));	// eax = original iHState
	THISCALL_EPILOG0()
	}

//
// If the run count is zero, or EDisable or EUnbind is set, clear EActive.
// Return the initial iHState, except for the new EActive bit.
//
__NAKED__ TUint32 NIrqHandler::EventDone()
	{
	THISCALL_PROLOG0()
	asm("mov eax, [ecx+%0]" : : "i" _FOFF(NIrqHandler,iHState));
	asm("ed_loop: ");
	asm("mov edx, eax ");
	asm("cmp edx, 0x10000 ");
	asm("jb short ed_rc_0 ");	// jump if run count now zero
	asm("test dh, 0x03 ");		// test EUnbind and EDisable
	asm("jz short ed_1 ");		// skip if neither set
	asm("ed_rc_0: ");
	asm("and dh, 0xDF ");		// clear EActive
	asm("ed_1: ");
	asm("lock cmpxchg [ecx+%0], edx" : : "i" _FOFF(NIrqHandler,iHState));
	asm("jne short ed_loop ");	// someone beat us to it
	asm("or dh, 0xDF ");		// set all bits except EActive ...
	asm("and ah, dh ");			// ... clearing EActive in the return value iff we cleared it
	THISCALL_EPILOG0()			// success - return original iHState with new EActive
	}

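// Illustrative C++ equivalent (a sketch; cas() as above):
//
//	TUint32 EventDoneSketch(volatile TUint32& aHState)
//		{
//		TUint32 orig = aHState;
//		TUint32 next;
//		for (;;)
//			{
//			next = orig;
//			if (orig < 0x10000u || (orig & 0x0300u))	// count 0, or EUnbind/EDisable set
//				next &= ~0x2000u;						// clear EActive
//			if (cas(aHState, orig, next))				// orig refreshed on failure
//				break;
//			}
//		return (orig & ~0x2000u) | (next & 0x2000u);	// original state, new EActive bit
//		}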