author | Dremov Kirill (Nokia-D-MSW/Tampere) <kirill.dremov@nokia.com> |
Wed, 18 Aug 2010 11:08:29 +0300 | |
changeset 247 | d8d70de2bd36 |
parent 90 | 947f0dc9f7a8 |
permissions | -rw-r--r-- |
0 | 1 |
// Copyright (c) 1994-2009 Nokia Corporation and/or its subsidiary(-ies). |
2 |
// All rights reserved. |
|
3 |
// This component and the accompanying materials are made available |
|
4 |
// under the terms of the License "Eclipse Public License v1.0" |
|
5 |
// which accompanies this distribution, and is available |
|
6 |
// at the URL "http://www.eclipse.org/legal/epl-v10.html". |
|
7 |
// |
|
8 |
// Initial Contributors: |
|
9 |
// Nokia Corporation - initial contribution. |
|
10 |
// |
|
11 |
// Contributors: |
|
12 |
// |
|
13 |
// Description: |
|
14 |
// e32\kernel\arm\ckernel.cpp |
|
15 |
// |
|
16 |
// |
|
17 |
||
18 |
#include <arm_mem.h> |
|
19 |
#include <arm_vfp.h> |
|
20 |
||
21 |
#define iMState iWaitLink.iSpare1 |
|
22 |
#define iExiting iWaitLink.iSpare2 |
|
23 |
||
24 |
GLREF_C void __ArmVectorReset(); |
|
25 |
GLREF_C void __ArmVectorUndef(); |
|
26 |
GLREF_C void __ArmVectorSwi(); |
|
27 |
GLREF_C void __ArmVectorAbortPrefetch(); |
|
28 |
GLREF_C void __ArmVectorAbortData(); |
|
29 |
GLREF_C void __ArmVectorReserved(); |
|
30 |
GLREF_C void __ArmVectorIrq(); |
|
31 |
GLREF_C void __ArmVectorFiq(); |
|
32 |
||
33 |
extern "C" void ExcFault(TAny* aExcInfo); |
|
34 |
||
35 |
/******************************************** |
|
36 |
* Thread |
|
37 |
********************************************/ |
|
38 |
||
39 |
DArmPlatThread::~DArmPlatThread()
	{
	// ARM-specific thread destructor: no ARM-only state to release here,
	// so simply run the generic kernel-side thread teardown.
	DThread::Destruct();
	}
|
43 |
||
44 |
TInt DArmPlatThread::Context(TDes8& aDes)
	{
	// Copy this thread's user-mode ARM register set into aDes.
	// Returns KErrAccessDenied for kernel threads or the running thread
	// itself, KErrDied if the thread is already exiting, KErrNone otherwise.
	TArmRegSet& regs = *(TArmRegSet*)aDes.Ptr();
	aDes.SetLength(sizeof(regs));
	if (iThreadType!=EThreadUser || this==TheCurrentThread)
		return KErrAccessDenied;	// only other user threads may be inspected
	if (iExiting)
		return KErrDied;			// context no longer meaningful
	TUint32 ignored;
	NKern::ThreadGetUserContext(&iNThread, &regs, ignored);
	return KErrNone;
	}
|
60 |
||
61 |
DProcess* P::NewProcess()
	{
	// Factory hook used by the generic kernel to create the ARM-specific
	// process object. Returns NULL if the heap allocation fails.
	return new DArmPlatProcess;
	}
|
65 |
||
66 |
#ifdef __CPU_HAS_VFP |
|
67 |
#ifdef __VFP_V3 |
|
68 |
const TInt KVfpContextSize = (1 + (32 * 2)) * 4; // FPSCR + 32 dword registers |
|
69 |
#else |
|
70 |
const TInt KVfpContextSize = (3 + (16 * 2)) * 4; // FPSCR + FPINST + FPINST2 + 16 dword registers |
|
71 |
#endif |
|
72 |
#endif |
|
73 |
TInt DArmPlatProcess::GetNewThread(DThread*& aThread, SThreadCreateInfo& aInfo)
//
// Create a new DArmPlatThread object
// If aThread=NULL on entry, allocate it on the kernel heap,
// otherwise do in-place construction.
//
	{
	DArmPlatThread* pT=(DArmPlatThread*)aThread;
	if (!pT)
		{
		TInt size = sizeof(DArmPlatThread);
#ifdef __CPU_HAS_VFP
		// Reserve room for the VFP register save area directly after the
		// thread object itself (single allocation, zero-initialised).
		size += KVfpContextSize;
#endif
		__KTRACE_OPT(KTHREAD,Kern::Printf("GetNewThread size=%04x",size));
		pT = (DArmPlatThread*)Kern::AllocZ(size);
		}
	if (!pT)
		return KErrNoMemory;
	// Placement-construct into the (heap or caller-supplied) memory.
	new (pT) DArmPlatThread;
	aThread = pT;
	pT->iOwningProcess=this;

#ifdef __CPU_HAS_VFP
	// The VFP context lives in the extra space allocated above; seed its
	// FPSCR slot with the system-wide default.
	pT->iNThread.iExtraContext = (TUint8*)pT + sizeof(DArmPlatThread);
	*(TUint32*)(pT->iNThread.iExtraContext) = Arm::VfpDefaultFpScr;
	// Inherit parent VFP FPSCR value if applicable
	if ((TInt)aInfo.iType != (TInt)EThreadInitial)
		{
		// Only inherit within the same process as the creating thread.
		if (pT->iOwningProcess == Kern::CurrentThread().iOwningProcess)
			{
			if (Arm::FpExc() & VFP_FPEXC_EN)
				{
				// VFP currently enabled: take the live FPSCR mode bits
				// straight from the hardware.
				*(TUint32*)(pT->iNThread.iExtraContext) = Arm::FpScr() & VFP_FPSCR_MODE_MASK;
				}
			else
				{
				// VFP not enabled for the creating thread: copy the FPSCR
				// value from its saved context instead.
				*(TUint32*)(pT->iNThread.iExtraContext) = *(TUint32*)(Kern::CurrentThread().iNThread.iExtraContext);
				}
			}
		}
#endif

	return KErrNone;
	}
|
118 |
||
119 |
extern void DumpExcInfo(TArmExcInfo& a); |
|
120 |
void DumpExcInfoX(TArmExcInfo& a)
	{
	// Dump the ARM exception record, then identify the current thread (if
	// any) on the debug port. Used from the KPANIC trace path and from the
	// final crash path in Exc::Dispatch().
	DumpExcInfo(a);
	NThread* nthread = NCurrentThread();
	if (nthread == NULL)
		Kern::Printf("No current thread");
	else
		{
		// Reuse the NThread pointer fetched above rather than calling
		// NCurrentThread() a second time.
		DThread* thread = Kern::NThreadToDThread(nthread);
		if (thread)
			{
			TFullName thread_name;
			thread->TraceAppendFullName(thread_name, EFalse);
			Kern::Printf("Thread full name=%S", &thread_name);
			// Use the DThread just resolved for the id, for consistency
			// with the name lookup above (same current thread either way).
			Kern::Printf("Thread ID=%d, KernCSLocked=%d",thread->iId,NKern::KernelLocked());
			}
		else
			Kern::Printf("Thread N/A, KernCSLocked=%d",NKern::KernelLocked());
		}
	}
|
140 |
||
141 |
extern void DumpFullRegSet(SFullArmRegSet& a); |
|
142 |
extern void GetUndefinedInstruction(TArmExcInfo* /*aContext*/); |
|
143 |
extern void PushExcInfoOnUserStack(TArmExcInfo*, TInt); |
|
144 |
extern "C" { |
|
145 |
extern SFullArmRegSet DefaultRegSet; |
|
146 |
} |
|
147 |
||
148 |
#ifdef __CPU_HAS_VFP |
|
149 |
GLREF_C TInt HandleVFPOperation(TAny* aPtr); |
|
150 |
#endif |
|
151 |
||
152 |
void Exc::Dispatch(TAny* aPtr, NThread*)
	{
	// Central ARM exception dispatcher. Tries, in order: demand-paging /
	// RAM-defrag fault recovery, magic-instruction fixups, a thread-installed
	// TExcTrap, VFP lazy-enable / bounce handling, a user-side exception
	// handler, kernel event handlers - and finally panics the thread or
	// faults the system.
#ifdef __CPU_ARM_ABORT_MODEL_UPDATED
#error Processors implementing the 'Base Register Updated' Abort Model are no longer supported
#endif

	TArmExcInfo* pR=(TArmExcInfo*)aPtr;
	TInt mode=pR->iCpsr & EMaskMode;

	TBool faultHandled = EFalse;

#ifdef __DEMAND_PAGING__
	// A data abort may simply be a page that needs paging in.
	faultHandled |= M::DemandPagingFault(aPtr) == KErrNone;
#endif

	if(!faultHandled)
		faultHandled |= M::RamDefragFault(aPtr) == KErrNone;

	if(faultHandled)
		{
#ifdef __ATOMIC64_USE_SLOW_EXEC__
		if (mode==ESvcMode && pR->iExcCode==EArmExceptionDataAbort && IsMagicAtomic64(pR->iR15))
			{
			// Magic atomic instruction so return to next instruction to stop any
			// writes to memory being executed and ensure interrupts are enabled.
			pR->iR15 += 4;
			pR->iCpsr &= ~KAllInterruptsMask;
			}
#endif
		return;		// fault fully recovered; resume the faulting instruction
		}

	if (mode==ESvcMode && pR->iExcCode==EArmExceptionDataAbort && IsMagic(pR->iR15))
		{
		// skip instruction that caused the exception, set the zero flag and place faulted address in r12
		__KTRACE_OPT(KPANIC,DumpExcInfoX(*pR));
		pR->iR15 += 4;
		pR->iCpsr |= ECpuZf;
		pR->iR12 = pR->iFaultAddress;
		return;
		}

	DThread* pT=TheCurrentThread;
	TExcTrap* xt=pT->iExcTrap;
	if (xt)
		{
		__KTRACE_OPT(KPANIC,DumpExcInfoX(*pR));
		// current thread wishes to handle exceptions
		(*xt->iHandler)(xt,pT,aPtr);
		}

	if (NKern::HeldFastMutex())	// thread held fast mutex when exception occurred
		Exc::Fault(aPtr);

#ifdef __CPU_HAS_VFP
	if (pR->iExcCode==EArmExceptionUndefinedOpcode)
		{
		// An undefined-instruction trap may be a VFP/NEON instruction taken
		// while the coprocessor is disabled. Decode enough of the opcode to
		// find a target coprocessor number.
		// Get the undefined instruction
		GetUndefinedInstruction(pR);

		const TUint32 opcode = pR->iFaultAddress;
		TInt cpnum = -1;	// -1 = not a coprocessor/NEON instruction

		// NOTE: the brace opened under __SUPPORT_THUMB_INTERWORKING below is
		// closed by a matching #ifdef further down, and the __CPU_ARMV7
		// 'else' that follows pairs with this ARM-mode 'if'. Preprocessor
		// and brace structure are deliberately interleaved - take care.
#ifdef __SUPPORT_THUMB_INTERWORKING
		if (!(pR->iCpsr & ECpuThumb)) {
#endif
		// ARM (32-bit) encodings:
		// check for coprocessor instructions
		//       10987654321098765432109876543210
		//  CDP: cond1110op1 CRn CRd cp_#op20CRm
		//  LDC: cond110PUNW1Rn  CRd cp_#offset
		//  STC: cond110PUNW0Rn  CRd cp_#offset
		//  MRC: cond1110op11CRn Rd  cp_#op21CRm
		//  MCR: cond1110op10CRn Rd  cp_#op21CRm
		//  ext: cond11000x0xRn  CRd cp_#offset
		// CDP2: 11111110xxxxxxxxxxxxxxxxxxx0xxxx
		// LDC2: 1111110xxxx1xxxxxxxxxxxxxxxxxxxx
		// STC2: 1111110xxxx0xxxxxxxxxxxxxxxxxxxx
		// MRC2: 11111110xxx1xxxxxxxxxxxxxxx1xxxx
		// MCR2: 11111110xxx0xxxxxxxxxxxxxxx1xxxx
		// MRRC: cond11100101Rn  Rd  cp_#opc CRm
		// MCRR: cond11100100Rn  Rd  cp_#opc CRm
		//
		// NEON data processing:
		//       1111001xxxxxxxxxxxxxxxxxxxxxxxxx
		// NEON element/structure load/store:
		//       11110100xxx0xxxxxxxxxxxxxxxxxxxx
		//
		// Coprocessor instructions have 2nd hex digit (bits 24-27) C,D,E.
		// The coprocessor number is in bits 8-11.
		// NEON instructions have 1st hex digits F2, F3, or F4x where x is even
		// No coprocessor number, route to cp 10

		TUint32 hex2 = (opcode>>24) & 0x0f;

		if (hex2==0xc || hex2==0xd || hex2==0xe)
			cpnum=(opcode>>8)&0x0f;
#ifdef __CPU_ARMV7
		else if ((opcode>>28)==0xf && (hex2==2 || hex2==3 || (hex2==4 && ((opcode>>20)&1)==0)))
			cpnum=VFP_CPID_S;	// NEON: no cp field, route to VFP/cp10
#endif

#ifdef __SUPPORT_THUMB_INTERWORKING
		}
#endif
#ifdef __CPU_ARMV7
	else
		{
		// Check for coprocessor instructions (thumb mode, so only first halfword)
		//       5432109876543210 5432109876543210
		//  CDP: 11101110op1 CRn  CRd cp_#op20CRm
		//  LDC: 1110110PUNW1Rn   CRd cp_#offset
		//  STC: 1110110PUNW0Rn   CRd cp_#offset
		//  MRC: 11101110op11CRn  Rd  cp_#op21CRm
		//  MCR: 11101110op10CRn  Rd  cp_#op21CRm
		// CDP2: 11111110xxxxxxxx xxxxxxxxxx0xxxxx
		// LDC2: 1111110xxxx1xxxx xxxxxxxxxxxxxxxx
		// STC2: 1111110xxxx0xxxx xxxxxxxxxxxxxxxx
		// MRC2: 11111110xxx1xxxx xxxxxxxxxx1xxxxx
		// MCR2: 11111110xxx0xxxx xxxxxxxxxx1xxxxx
		// MRRC: 111011100101Rn   Rd  cp_#opc CRm
		// MCRR: 111011100100Rn   Rd  cp_#opc CRm
		//
		// Operations starting 1111 are not currently valid for VFP/NEON
		// but are handled here in case of future development or
		// alternative coprocessors
		//
		// NEON data processing:
		//       111x1111xxxxxxxx xxxxxxxxxxxxxxxx
		// NEON element/structure load/store:
		//       11111001xxx0xxxx xxxxxxxxxxxxxxxx
		//
		// Coprocessor instructions have first hex digit E or F
		// and second C, D or E
		// The coprocessor number is in bits 8-11 of the second halfword
		// NEON instructions have first 2 hex digits EF, FF or F9
		// No coprocessor number, route to cp 10

		const TUint32 hex12 = opcode >> 8;

		if ((hex12 & 0xe0) == 0xe0)
			{
			const TUint32 hex2 = hex12 & 0xf;
			if (hex2 == 0xc || hex2 == 0xd || hex2 == 0xe)
				{
				// Coprocessor number is in the second halfword: fetch it by
				// reading the instruction at PC+2.
				TArmExcInfo nextInstruction = *pR;
				nextInstruction.iR15 += 2;
				GetUndefinedInstruction(&nextInstruction);
				cpnum = (nextInstruction.iFaultAddress >> 8) & 0x0f;
				}
			else
				{
				if (hex12 == 0xef || hex12 == 0xf9 || hex12 == 0xff)
					cpnum = VFP_CPID_S;	// NEON: route to VFP/cp10
				}
			}
		}
#endif // __CPU_ARMV7

		if (cpnum >= 0)
			{
			__KTRACE_OPT(KEVENT,Kern::Printf("VFP Instruction %08x", opcode));
			TInt r = HandleVFPOperation(pR);
			if (r==KErrNone)
				return;		// VFP enabled/bounced successfully; retry instruction
			__KTRACE_OPT(KEVENT,Kern::Printf("VFP Instruction returned %d", r));
			}
		}
#endif // __CPU_HAS_VFP

	NKern::LockSystem();
	if (pT->iThreadType==EThreadUser && mode==EUserMode)
		{
		// NOTE(review): 12 appears to be the TExcType value used for
		// hardware exceptions delivered to user handlers - confirm against
		// the TExcType enum before changing.
		TExcType type=(TExcType)12;
		if (pT->IsExceptionHandled(type))
			{
			pT->iFlags |= KThreadFlagLastChance;
			NKern::UnlockSystem();

			// tweak context to call exception handler
			PushExcInfoOnUserStack(pR, type);
			pR->iR15 = (TUint32)pT->iOwningProcess->iReentryPoint;
			pR->iR4 = KModuleEntryReasonException;
#ifdef __SUPPORT_THUMB_INTERWORKING
			// Re-entry point is ARM code; clear the Thumb state bit.
			pR->iCpsr &= ~ECpuThumb;
#endif
			return;
			}
		}
	// Fault system before attempting to signal kernel event handler as going to
	// crash anyway so no point trying to handle the event. Also, stops
	// D_EXC hanging system on crash of critical/permanent thread.
	if (pT->iFlags & (KThreadFlagSystemCritical|KThreadFlagSystemPermanent))
		Exc::Fault(aPtr);
	NKern::UnlockSystem();

	TUint m = DKernelEventHandler::Dispatch(EEventHwExc, pR, NULL);
	if (m & (TUint)DKernelEventHandler::EExcHandled)
		return;

//	__KTRACE_OPT(KPANIC,DumpExcInfoX(*pR));
	DumpExcInfoX(*pR);

	// panic current thread
	K::PanicKernExec(ECausedException);
	}
|
357 |
||
358 |
EXPORT_C void Exc::Fault(TAny* aExcInfo)
	{
	// Fatal exception path: record the exception info and full register
	// state in the super page, dump the registers, then bring the system
	// down via Kern::Fault. Does not return.
	// @param aExcInfo TArmExcInfo for the exception, or NULL if faulting
	//                 without a live exception frame.
#ifdef __SMP__
	TSubScheduler* ss = &SubScheduler();
	if (!ss)
		ss = &TheSubSchedulers[0];	// very early boot: no sub-scheduler yet
	ss->iSSX.iExcInfo = aExcInfo;
	SFullArmRegSet* a = ss->iSSX.iRegs;
	if (!a)
		a = &DefaultRegSet;			// fall back to the static register set
#else
	TheScheduler.i_ExcInfo = aExcInfo;
	SFullArmRegSet* a = (SFullArmRegSet*)TheScheduler.i_Regs;
#endif
	if (aExcInfo)
		{
		// Capture current CPU state, then overlay the values from the
		// exception frame so the dump shows the faulting context.
		Arm::SaveState(*a);
		Arm::UpdateState(*a, *(TArmExcInfo*)aExcInfo);
		}
	TExcInfo e;
	e.iCodeAddress = (TAny*)a->iN.iR15;
	e.iDataAddress = (TAny*)a->iB[0].iDFAR;	// data fault address register
	e.iExtraData = (TInt)a->iB[0].iDFSR;	// data fault status register
//	__KTRACE_OPT(KPANIC,DumpExcInfoX(*pR));
	DumpFullRegSet(*a);
	TheSuperPage().iKernelExcId = a->iExcCode;
	TheSuperPage().iKernelExcInfo = e;
	Kern::Fault("Exception", K::ESystemException);
	}
|
387 |
||
388 |
extern "C" void ExcFault(TAny* aExcInfo)
	{
	// C-linkage wrapper so assembler exception stubs can reach Exc::Fault.
	Exc::Fault(aExcInfo);
	}
|
392 |
||
393 |
void DArmPlatThread::DoExit2()
	{
	// Thread-exit hook: drop any lazy VFP ownership held by this thread so
	// a re-used thread object starts with a fresh context.
#ifdef __CPU_HAS_VFP
	for (TInt cpu = 0; cpu < NKern::NumberOfCpus(); cpu++)
		{
		// Ensure that if this thread object is re-used then it gets a fresh context
		if (Arm::VfpThread[cpu] == &iNThread)
			Arm::VfpThread[cpu] = NULL;
#ifndef __SMP__
		// NOTE(review): on unicore this disables VFP unconditionally, even
		// if this thread was not the current VFP owner - presumably safe
		// since the next user will re-enable via HandleVFPOperation; confirm.
		Arm::ModifyFpExc(VFP_FPEXC_EX | VFP_FPEXC_EN, 0); // Disable VFP here for unicore
#endif
		}
#endif
	}
|
407 |
||
408 |
||
409 |
/** Sets the function used to handle bounces from VFP hardware. |
|
410 |
||
411 |
Used by a VFP coprocessor support kernel extension to register its |
|
412 |
bounce handler. |
|
413 |
||
414 |
@publishedPartner |
|
415 |
@released |
|
416 |
*/ |
|
417 |
EXPORT_C void Arm::SetVfpBounceHandler(TVfpBounceHandler aHandler)
	{
	// Install (or clear, if aHandler is NULL) the handler invoked when an
	// already-enabled VFP raises an exception - see HandleVFPOperation().
	Arm::VfpBounceHandler = aHandler;
	}
|
421 |
||
422 |
||
423 |
/** Sets the default value of FPSCR in the VFP hardware. |
|
424 |
||
425 |
Used by a VFP coprocessor support kernel extension to enable a |
|
426 |
better default mode than RunFast. |
|
427 |
||
428 |
@publishedPartner |
|
429 |
@released |
|
430 |
*/ |
|
431 |
EXPORT_C void Arm::SetVfpDefaultFpScr(TUint32 aFpScr)
	{
#ifdef __CPU_HAS_VFP
	// New threads receive this value as their initial saved FPSCR
	// (see DArmPlatProcess::GetNewThread). No-op on CPUs without VFP.
	VfpDefaultFpScr = aFpScr;
#endif
	}
|
437 |
||
438 |
||
439 |
#ifdef __CPU_HAS_VFP |
|
440 |
||
441 |
#ifndef __SMP__ |
|
442 |
extern void DoSaveVFP(void*); |
|
443 |
#endif |
|
444 |
extern void DoRestoreVFP(const void*); |
|
445 |
||
446 |
GLDEF_C TInt HandleVFPOperation(TAny* aPtr)
	{
	// Lazy VFP context switch. Called from Exc::Dispatch when a VFP/NEON
	// instruction trapped as undefined. If VFP was already enabled the trap
	// is a genuine VFP exception and is forwarded to the registered bounce
	// handler; otherwise enable VFP for the current thread, restoring (and
	// on unicore, saving the previous owner's) register context.
	// Returns KErrNone if the instruction should be retried.
	NThread* pC = NCurrentThread();

	if (Arm::FpExc() & VFP_FPEXC_EN)
		{
		// Coprocessor already enabled so it must be a real exception
		if (Arm::VfpBounceHandler)
			return Arm::VfpBounceHandler((TArmExcInfo*)aPtr);
		else
			return KErrGeneral;
		}

	NKern::Lock();	// no preemption while juggling VFP ownership

	// Enable access for this thread, clear any exceptional condition
	TUint32 oldFpExc = Arm::ModifyFpExc(VFP_FPEXC_EX, VFP_FPEXC_EN);

#ifndef __SMP__
	if (Arm::VfpThread[0] != pC)
		{
		// Only for unicore - SMP explicitly saves the current context and disables VFP
		// when a thread is descheduled in case it runs on a different core next time
		if (Arm::VfpThread[0])
			{
			// Save the previous owner's registers into its context area.
			DoSaveVFP(Arm::VfpThread[0]->iExtraContext);
			Arm::VfpThread[0]->ModifyFpExc(VFP_FPEXC_EN, 0); // Take access away from previous thread
			}
		DoRestoreVFP(pC->iExtraContext); // Restore this thread's context
		Arm::VfpThread[0] = pC;
		}
#else
	const TInt currentCpu = NKern::CurrentCpu();
	if (Arm::VfpThread[currentCpu] != pC)
		{
		DoRestoreVFP(pC->iExtraContext); // Restore this thread's context
		Arm::VfpThread[currentCpu] = pC;
		// Clear any stale record of this thread owning VFP on another CPU;
		// the CAS only overwrites entries that still equal pC, so other
		// threads' ownership records are left untouched.
		for (TInt cpu = 0; cpu < NKern::NumberOfCpus(); cpu++)
			{
			if (cpu != currentCpu)
				{
				TUint32 pCcopy = (TUint32)pC;
				__e32_atomic_cas_rlx32(&Arm::VfpThread[cpu], &pCcopy, NULL);
				}
			}
		}
#endif

	// Put FPEXC back how it was in case there was a pending exception, but keep enable bit on
	Arm::SetFpExc(oldFpExc | VFP_FPEXC_EN);
	NKern::Unlock();

	return KErrNone;
	}
|
500 |
#endif // __CPU_HAS_VFP |
|
501 |
||
502 |
#ifdef _DEBUG |
|
503 |
extern "C" void __FaultIpcClientNotNull()
	{
	// Debug-build only: fault the kernel when an IPC client pointer is
	// unexpectedly non-NULL (called from assembler checks).
	K::Fault(K::EIpcClientNotNull);
	}
|
507 |
#endif |