|
// Copyright (c) 2002-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32test\debug\context.cia
//
//
|
17 |
|
18 #ifndef __KERNEL_MODE__ |
|
19 #include "context.h" |
|
20 #include <u32exec.h> |
|
21 |
|
// SetRegs: seed the live CPU registers with a recognisable ascending
// pattern and record the same values in the TArmRegSet snapshot, so a
// thread context captured later can be compared field-by-field.
//   In:  r0 = TArmRegSet* snapshot buffer, r1 = pattern seed
//   Effect: iR0..iR12 hold seed, seed+1, ..., seed+12 and r1-r12 are
//   loaded with the matching values; iR13/iR14 record the caller's
//   sp/lr unchanged. On exit r0 is reloaded from iR0, so r0 itself
//   also equals the seed and matches the snapshot.
__NAKED__ void SetRegs()
	{
	asm("SetRegs:");	// asm-level entry so naked callers can "bl SetRegs"
	asm("str r1, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR0));	// iR0 = seed (r0 takes this value at the end)
	asm("add r1, r1, #1 ");
	asm("str r1, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR1));	// r1 = seed+1
	asm("add r2, r1, #1 ");
	asm("str r2, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR2));	// r2 = seed+2
	asm("add r3, r2, #1 ");
	asm("str r3, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR3));	// r3 = seed+3
	asm("add r4, r3, #1 ");
	asm("str r4, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR4));	// r4 = seed+4
	asm("add r5, r4, #1 ");
	asm("str r5, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR5));	// r5 = seed+5
	asm("add r6, r5, #1 ");
	asm("str r6, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR6));	// r6 = seed+6
	asm("add r7, r6, #1 ");
	asm("str r7, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR7));	// r7 = seed+7
	asm("add r8, r7, #1 ");
	asm("str r8, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR8));	// r8 = seed+8
	asm("add r9, r8, #1 ");
	asm("str r9, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR9));	// r9 = seed+9
	asm("add r10, r9, #1 ");
	asm("str r10, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR10));	// r10 = seed+10
	asm("add r11, r10, #1 ");
	asm("str r11, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR11));	// r11 = seed+11
	asm("add r12, r11, #1 ");
	asm("str r12, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR12));	// r12 = seed+12
	asm("str r13, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR13));	// current sp recorded as-is
	asm("str r14, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR14));	// return address recorded as-is
	asm("ldr r0, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,iR0));	// r0 = seed; snapshot pointer no longer needed
	__JUMP(,lr);
	}
|
55 |
|
// CHECK_REGA(reg,val): compare saved register field 'reg' of the context
// at r0 against the constant/address 'val' (loaded via a literal pool).
// On mismatch set r0 = 0 (failure) and return to the caller immediately;
// on match fall through. Clobbers r2, r3 and the flags.
// (No per-line comments inside the macro: a // comment would swallow the
// line-continuation backslash.)
#define CHECK_REGA(reg,val) \
	asm("ldr r2, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,reg)); \
	asm("ldr r3, ="#val); \
	asm("cmp r2, r3"); \
	asm("movne r0, #0"); \
	__JUMP(ne,lr);

// CHECK_REG(reg): compare field 'reg' of the captured context (r0) with
// the same field of the snapshot recorded by SetRegs (r1). On mismatch
// set r0 = 0 and return; on match fall through. Clobbers r2, r3, flags.
#define CHECK_REG(reg) \
	asm("ldr r2, [r0, #%a0]" : : "i" _FOFF(TArmRegSet,reg)); \
	asm("ldr r3, [r1, #%a0]" : : "i" _FOFF(TArmRegSet,reg)); \
	asm("cmp r2, r3"); \
	asm("movne r0, #0"); \
	__JUMP(ne,lr);
|
69 |
|
70 |
|
71 |
|
// Test thread body: seed the registers (pattern base 0x80000000) and
// then perform a deliberately misaligned word load through sp to raise
// a hardware exception; the test harness captures the thread context at
// that point and CheckContextHwExc compares it against the snapshot.
//   Returns 0 only if the faulting load somehow does not trap.
__NAKED__ TInt ThreadContextHwExc(TAny*)
	{
	asm("stmdb sp!, {r4-r11,lr} ");	// preserve callee-saved regs before SetRegs overwrites them
	asm("mov r1, #0x80000000");	// pattern seed for this test case
	asm("bl SetRegs");
	asm("ldr r0,[r13, #1]");	// Cause alignment fault
	asm("ThreadContextHwExc_pc:");	// expected iR15 in the captured context (see CheckContextHwExc)
	asm("mov r0, #0 ");	// reached only if the load did not fault
	asm("ldmia sp!, {r4-r11,pc} ");
	}
|
82 |
|
// Verify the context captured during ThreadContextHwExc's fault.
//   aContext (r0)   = context reported for the faulted thread
//   aSavedData (r1) = snapshot recorded by SetRegs
// Every general register must match the snapshot and the pc must be the
// label just after the faulting load. Returns 1 on success; the CHECK_*
// macros return 0 on the first mismatch.
__NAKED__ TInt CheckContextHwExc(TArmRegSet* aContext,TArmRegSet* aSavedData)
	{
	CHECK_REG(iR0);
	CHECK_REG(iR1);
	CHECK_REG(iR2);
	CHECK_REG(iR3);
	CHECK_REG(iR4);
	CHECK_REG(iR5);
	CHECK_REG(iR6);
	CHECK_REG(iR7);
	CHECK_REG(iR8);
	CHECK_REG(iR9);
	CHECK_REG(iR10);
	CHECK_REG(iR11);
	CHECK_REG(iR12);
	CHECK_REG(iR13);
	CHECK_REG(iR14);
	CHECK_REGA(iR15,ThreadContextHwExc_pc)	// pc = instruction after the misaligned load
	asm("mov r0,#1");	// all registers matched
	__JUMP(,lr);
	}
|
104 |
|
105 |
|
106 |
|
// Test thread body: seed the registers (pattern base 0x70000000) and
// spin forever in user mode so the harness can capture the context of a
// thread interrupted mid-loop. Never returns, hence no registers are
// saved on entry.
__NAKED__ TInt ThreadContextUserInt(TAny*)
	{
	asm("mov r1, #0x70000000");	// pattern seed for this test case
	asm("bl SetRegs");
	asm("ThreadContextUserInt_pc:");	// expected iR15 while spinning
	asm("b ThreadContextUserInt_pc");	// spin until externally suspended/killed
	}
|
114 |
|
// Verify the context captured while ThreadContextUserInt spins.
//   r0 = captured context, r1 = snapshot recorded by SetRegs.
// All general registers must match the snapshot and the pc must be the
// spin-loop label. Returns 1 on success, 0 (via the macros) on the
// first mismatch.
__NAKED__ TInt CheckContextUserInt(TArmRegSet*,TArmRegSet*)
	{
	CHECK_REG(iR0);
	CHECK_REG(iR1);
	CHECK_REG(iR2);
	CHECK_REG(iR3);
	CHECK_REG(iR4);
	CHECK_REG(iR5);
	CHECK_REG(iR6);
	CHECK_REG(iR7);
	CHECK_REG(iR8);
	CHECK_REG(iR9);
	CHECK_REG(iR10);
	CHECK_REG(iR11);
	CHECK_REG(iR12);
	CHECK_REG(iR13);
	CHECK_REG(iR14);
	CHECK_REGA(iR15,ThreadContextUserInt_pc)	// pc = the spin-loop label
	asm("mov r0,#1");	// all registers matched
	__JUMP(,lr);
	}
|
136 |
|
// Verify the context of the ThreadContextUserInt thread after it has
// died. Only r0-r3, r12-r14 and the pc are checked; the callee-saved
// block r4-r11 is deliberately skipped here (NOTE(review): presumably
// those are no longer reported reliably once the thread has exited —
// confirm against the test harness).
__NAKED__ TInt CheckContextUserIntDied(TArmRegSet*,TArmRegSet*)
	{
	CHECK_REG(iR0);
	CHECK_REG(iR1);
	CHECK_REG(iR2);
	CHECK_REG(iR3);
	CHECK_REG(iR12);
	CHECK_REG(iR13);
	CHECK_REG(iR14);
	CHECK_REGA(iR15,ThreadContextUserInt_pc)	// pc = the spin-loop label
	asm("mov r0,#1");	// all checked registers matched
	__JUMP(,lr);
	}
|
150 |
|
151 |
|
// Test thread body: seed the registers (pattern base 0x60000000), point
// lr at ThreadContextWFAR_return and block in the WaitForAnyRequest
// fast exec call; the harness captures the context while the thread is
// blocked inside the exec handler.
//   Returns 0 if the wait ever completes.
__NAKED__ TInt ThreadContextWFAR(TAny*)
	{
	asm("stmdb sp!, {r4-r11,lr} ");	// preserve callee-saved regs before SetRegs overwrites them
	asm("mov r1, #0x60000000");	// pattern seed for this test case
	asm("bl SetRegs");
	asm("adr lr, ThreadContextWFAR_return");	// return address the exec dispatcher will use
	FAST_EXEC0(EFastExecWaitForAnyRequest);	// blocks here; captured pc is this instruction
	asm("ThreadContextWFAR_return:");	// expected iR14 in the captured context
	asm("mov r0, #0 ");
	asm("ldmia sp!, {r4-r11,pc} ");
	}
|
163 |
|
// Verify the context captured while ThreadContextWFAR is blocked in
// WaitForAnyRequest. Only the callee-saved block r4-r11 plus sp are
// compared with the snapshot (r0-r3/r12 are volatile across an exec
// call, so they are not checked); lr must be the return label and pc
// must be 4 bytes before it, i.e. the exec-call instruction itself.
// Returns 1 on success, 0 on the first mismatch.
__NAKED__ TInt CheckContextWFAR(TArmRegSet*,TArmRegSet*)
	{
	CHECK_REG(iR4);
	CHECK_REG(iR5);
	CHECK_REG(iR6);
	CHECK_REG(iR7);
	CHECK_REG(iR8);
	CHECK_REG(iR9);
	CHECK_REG(iR10);
	CHECK_REG(iR11);
	CHECK_REG(iR13);
	CHECK_REGA(iR14,ThreadContextWFAR_return)
	CHECK_REGA(iR15,ThreadContextWFAR_return - 4)	// pc = the blocking exec-call instruction
	asm("mov r0,#1");	// all checked registers matched
	__JUMP(,lr);
	}
|
180 |
|
// Verify the context of the ThreadContextWFAR thread after it has died:
// only sp, lr and pc are still checked (lr = return label, pc = the
// exec-call instruction 4 bytes before it). Returns 1 on success.
__NAKED__ TInt CheckContextWFARDied(TArmRegSet*,TArmRegSet*)
	{
	CHECK_REG(iR13);
	CHECK_REGA(iR14,ThreadContextWFAR_return)
	CHECK_REGA(iR15,ThreadContextWFAR_return - 4)	// pc = the blocking exec-call instruction
	asm("mov r0,#1");	// all checked registers matched
	__JUMP(,lr);
	}
|
189 |
|
190 |
|
191 |
|
// Test thread body: seed the registers (pattern base 0x50000000), set
// lr to the return label and suspend itself via the EExecThreadSuspend
// slow exec call on the current thread; the harness captures the
// context while the thread is suspended inside the exec handler.
//   Returns 0 if the thread is ever resumed and runs to completion.
__NAKED__ TInt ThreadContextExecCall(TAny*)
	{
	asm("stmdb sp!, {r4-r11,lr} ");	// preserve callee-saved regs before SetRegs overwrites them
	asm("mov r1, #0x50000000");	// pattern seed for this test case
	asm("bl SetRegs");
	asm("adr lr, ThreadContextExecCall_return");	// return address the exec dispatcher will use
	asm("ldr r0, current_thread_handle ");	// r0 = 0xffff8001 from the literal pool below
	SLOW_EXEC1(EExecThreadSuspend);	// suspends here; captured pc is this instruction
	asm("ThreadContextExecCall_return:");	// expected iR14 in the captured context
	asm("mov r0, #0 ");
	asm("ldmia sp!, {r4-r11,pc} ");
	// Literal pool (placed after the return so it is never executed).
	// NOTE(review): 0xffff8001 looks like the current-thread pseudo
	// handle — confirm against the kernel's handle constants.
	asm("current_thread_handle: ");
	asm(".word 0xffff8001 ");
	}
|
206 |
|
// Verify the context captured while ThreadContextExecCall is suspended
// inside the exec call. As with the WFAR case, only r4-r11 and sp are
// compared against the snapshot (r0-r3/r12 are volatile across an exec
// call); lr must be the return label and pc the exec-call instruction
// 4 bytes before it. Returns 1 on success, 0 on the first mismatch.
__NAKED__ TInt CheckContextExecCall(TArmRegSet*,TArmRegSet*)
	{
	CHECK_REG(iR4);
	CHECK_REG(iR5);
	CHECK_REG(iR6);
	CHECK_REG(iR7);
	CHECK_REG(iR8);
	CHECK_REG(iR9);
	CHECK_REG(iR10);
	CHECK_REG(iR11);
	CHECK_REG(iR13);
	CHECK_REGA(iR14,ThreadContextExecCall_return)
	CHECK_REGA(iR15,ThreadContextExecCall_return - 4)	// pc = the suspending exec-call instruction
	asm("mov r0,#1");	// all checked registers matched
	__JUMP(,lr);
	}
|
223 |
|
//
// Simulate a software exception by invoking first the exec call which
// triggers kernel-side handlers and on return panicking the current
// thread.
//
|
229 |
|
// Test thread body simulating a software exception: seed the registers
// (pattern base 0x50000000), invoke EExecIsExceptionHandled (which runs
// the kernel-side exception machinery), then panic the current thread
// via EExecThreadKill. The harness captures the context around the
// first exec call; CheckContextSwExc compares it.
__NAKED__ TInt ThreadContextSwExc(TAny*)
	{
	asm("stmdb sp!, {r4-r11,lr} ");	// preserve callee-saved regs before SetRegs overwrites them
	asm("mov r1, #0x50000000");	// pattern seed for this test case
	asm("bl SetRegs");
	asm("adr lr, ThreadContextSwExc_return");	// return address the exec dispatcher will use
	asm("ldr r0, current_thread_handle ");	// literal defined in ThreadContextExecCall above
	asm("mov r2, #1");	// third exec argument (r1 still holds the SetRegs pattern value)
	SLOW_EXEC3(EExecIsExceptionHandled);
	asm("ThreadContextSwExc_return:");	// expected iR14 in the captured context
	// Now panic the current thread: Kill(handle, EExitPanic, reason=0, NULL category)
	asm("ldr r0, current_thread_handle ");
	asm("ldr r3, null_descriptor");	// r3 = 0, loaded from the zero word below
	asm("mov r1, #%a0 " : : "i" ((TInt)EExitPanic));	// exit type = panic
	asm("mov r2, #0");	// exit reason 0
	SLOW_EXEC4(EExecThreadKill);	// kills this thread; should not return
	asm("ldmia sp!, {r4-r11,pc} ");	// not normally reached
	// Literal pool: two zero words used as a NULL descriptor value.
	asm("null_descriptor:");
	asm(".word 0x00000000");
	asm(".word 0x00000000");
	}
|
250 |
|
// Verify the context captured during ThreadContextSwExc's simulated
// software exception. Only r4-r11 and sp are compared against the
// snapshot (r0-r3/r12 are volatile across an exec call); lr must be the
// return label and pc the exec-call instruction 4 bytes before it.
// Returns 1 on success, 0 on the first mismatch.
__NAKED__ TInt CheckContextSwExc(TArmRegSet*,TArmRegSet*)
	{
	CHECK_REG(iR4);
	CHECK_REG(iR5);
	CHECK_REG(iR6);
	CHECK_REG(iR7);
	CHECK_REG(iR8);
	CHECK_REG(iR9);
	CHECK_REG(iR10);
	CHECK_REG(iR11);
	CHECK_REG(iR13);
	CHECK_REGA(iR14,ThreadContextSwExc_return)
	CHECK_REGA(iR15,ThreadContextSwExc_return - 4)	// pc = the EExecIsExceptionHandled call instruction
	asm("mov r0,#1");	// all checked registers matched
	__JUMP(,lr);
	}
|
267 |
|
// Verify a context captured while the thread was running kernel-side
// (see SpinInKernel below). Only the callee-saved block r4-r11 can be
// compared with the snapshot; sp and pc are unpredictable here, as the
// original comments explain. Returns 1 on success, 0 on mismatch.
__NAKED__ TInt CheckContextKernel(TArmRegSet*,TArmRegSet*)
	{
	CHECK_REG(iR4);
	CHECK_REG(iR5);
	CHECK_REG(iR6);
	CHECK_REG(iR7);
	CHECK_REG(iR8);
	CHECK_REG(iR9);
	CHECK_REG(iR10);
	CHECK_REG(iR11);
	// can't test r13 because we don't know how much the irq vector pushes onto the stack
	// CHECK_REG(iR13);
	// can't really test r15 because pc is somewhere in the irq
	// vector and we don't export that address
	asm("mov r0,#1");	// all checked registers matched
	__JUMP(,lr);
	}
|
285 |
|
286 #else |
|
287 |
|
288 #include <e32def.h> |
|
289 #include <cpudefs.h> |
|
290 |
|
// Kernel-side helper. When the argument is false (0), returns the
// current stack pointer so the caller knows where this context lives.
// When true, loads r0-r12 and lr with an ascending pattern starting at
// 0xa0000000 and spins forever so a kernel-mode context can be captured
// (verified by CheckContextKernel, which only checks r4-r11).
__NAKED__ TUint32 SpinInKernel(TBool)
	{
	asm("cmp r0, #0 ");	// test the argument; mov below does not disturb the flags
#ifdef __SMP__
	asm("mov r0, sp ");	// candidate return value: current sp
#else
	asm("mov r0, sp");	// candidate return value: current sp
	// asm("sub r0, sp, #32 ");	// IRQ mode pushes 8 extra registers <--- NOT TRUE
#endif
	asm("beq exit ");	// argument was 0: return sp
	asm("mov r0, #0xa0000000 ");	// pattern base for the kernel-context test
	asm("add r1, r0, #1 ");
	asm("add r2, r1, #1 ");
	asm("add r3, r2, #1 ");
	asm("add r4, r3, #1 ");
	asm("add r5, r4, #1 ");
	asm("add r6, r5, #1 ");
	asm("add r7, r6, #1 ");
	asm("add r8, r7, #1 ");
	asm("add r9, r8, #1 ");
	asm("add r10, r9, #1 ");
	asm("add r11, r10, #1 ");
	asm("add r12, r11, #1 ");
	// NOTE(review): lr gets +2 rather than +1, breaking the pattern —
	// presumably deliberate; confirm against the capturing code's
	// expectations (CheckContextKernel does not check r14).
	asm("add r14, r12, #2 ");
	asm("loopforever: ");
	asm("b loopforever ");	// spin here until the context is captured
	asm("exit: ");
	__JUMP(,lr);	// lr untouched on this path (branch taken before the pattern loads)
	}
|
320 |
|
321 #endif |