// Copyright (c) 2008-2009 Nokia Corporation and/or its subsidiary(-ies).
// All rights reserved.
// This component and the accompanying materials are made available
// under the terms of the License "Eclipse Public License v1.0"
// which accompanies this distribution, and is available
// at the URL "http://www.eclipse.org/legal/epl-v10.html".
//
// Initial Contributors:
// Nokia Corporation - initial contribution.
//
// Contributors:
//
// Description:
// e32\common\arm\atomic_32_v6.h
// 32 bit atomic operations on V6 and V6K processors
// Also 8 and 16 bit atomic operations on V6K processors
// Also 8, 16 and 32 bit load/store on all processors
//
//

#include "atomic_ops.h"

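// This header is included repeatedly (driven by atomic_ops.h) with the
// __OPERATION__, __TYPE__, __DATA_SIZE__, __fname__ and *_INST__ macros bound
// to a particular operation and operand size, so each pass expands one family
// of atomic functions. As a rough illustration only, assuming the usual
// e32atomics.h declarations, the resulting entry points are used like this:
//
//	TUint32 old = __e32_atomic_add_ord32(&count, 1);               // fully ordered add
//	TBool done  = __e32_atomic_cas_acq32(&val, &expected, desired); // acquire CAS
//
// The rlx/acq/rel/ord suffixes select relaxed, acquire, release and fully
// ordered memory semantics respectively.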
       
#if defined(__OP_LOAD__)
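// Load with acquire semantics: a plain load of the requested width followed
// by a data memory barrier, so the load completes before any later accesses
// are observed.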
       
extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,acq,__DATA_SIZE__)(const volatile TAny* /*a*/)
	{
	// R0=a
	// return value in R0

	__LDR_INST__( ," r0, [r0] ");
	__LOCAL_DATA_MEMORY_BARRIER_Z__(r1);
	__JUMP(,lr);
	}

       
#elif defined(__OP_STORE__)
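// Store with release semantics: a barrier ahead of the store ensures all
// earlier accesses are observed first; the ord variant adds a second barrier
// after the store. If __BARRIERS_NEEDED__ is not defined, the rel variant
// below has an empty body and execution falls through into the ord variant.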
       
extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rel,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*v*/)
	{
#ifdef __BARRIERS_NEEDED__
	// R0=a, R1=v
	// return value in R0 equal to v
	__LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
	__STR_INST__( ," r1, [r0] ");
	asm("mov r0, r1 ");
	__JUMP(,lr);
#endif
	}

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,ord,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=v
	// return value in R0 equal to v
	__LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
	__STR_INST__( ," r1, [r0] ");
	__LOCAL_DATA_MEMORY_BARRIER__(r12);
	asm("mov r0, r1 ");
	__JUMP(,lr);
	}

       
#elif defined(__OP_RMW1__)
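// One-operand read-modify-write (swp/add/and/ior/xor), returning the old
// value of *a. For swp the new value is already in R1 (__SOURCE_REG__ 1);
// the arithmetic/logical ops compute it into R2 via __DO_PROCESSING__.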
       
#ifdef __OP_SWP__
#define	__SOURCE_REG__		1
#define	__DO_PROCESSING__
#else
#define	__SOURCE_REG__		2
#if defined(__OP_ADD__)
#define	__DO_PROCESSING__	asm("add r2, r0, r1 ");
#elif defined(__OP_AND__)
#define	__DO_PROCESSING__	asm("and r2, r0, r1 ");
#elif defined(__OP_IOR__)
#define	__DO_PROCESSING__	asm("orr r2, r0, r1 ");
#elif defined(__OP_XOR__)
#define	__DO_PROCESSING__	asm("eor r2, r0, r1 ");
#endif
#endif

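// Standard exclusive-access retry loop: the address moves to R12 so R0 can
// receive the old value; STREX writes 0 to R3 on success and 1 if the
// exclusive monitor was lost, in which case the whole sequence is retried.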
       
#define __DO_RMW1_OP__				\
	asm("mov r12, r0 ");			\
	asm("1: ");						\
	__LDREX_INST__(0,12);			\
	__DO_PROCESSING__				\
	__STREX_INST__(3,__SOURCE_REG__,12);	\
	asm("cmp r3, #0 ");				\
	asm("bne 1b ");

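// Note the fall-through arrangement used for every RMW operation below: with
// barriers enabled, rel emits just a leading barrier and drops through into
// the rlx body, while ord emits a leading barrier and drops through into acq
// (which appends a trailing barrier). This relies on the naked functions
// being emitted consecutively. Without __BARRIERS_NEEDED__ all four variants
// collapse into a single implementation.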
       
extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rel,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=v
	// return value in R0
#ifdef __BARRIERS_NEEDED__				// If no barriers, all ordering variants collapse to same function
	__LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
	}

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rlx,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=v
	// return value in R0
#ifdef __BARRIERS_NEEDED__				// If no barriers, all ordering variants collapse to same function
	__DO_RMW1_OP__
	__JUMP(,lr);
#endif
	}

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,ord,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=v
	// return value in R0
#ifdef __BARRIERS_NEEDED__				// If no barriers, all ordering variants collapse to same function
	__LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
	}

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,acq,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=v
	// return value in R0
	__DO_RMW1_OP__
	__LOCAL_DATA_MEMORY_BARRIER__(r3);
	__JUMP(,lr);
	}

#undef __DO_RMW1_OP__
#undef __SOURCE_REG__
#undef __DO_PROCESSING__

       
#elif defined(__OP_CAS__)
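// Compare-and-store: if *a equals the expected value *q, store v and return
// TRUE; otherwise write the value actually found at *a back to *q and return
// FALSE. R12 holds the expected value; after a successful STREX, R3 is 0, so
// the eq/ne conditions at label 2 select the return value.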
       
#define __DO_CAS_OP__				\
	__LDR_INST__( ," r12, [r1] ");	\
	asm("1: ");						\
	__LDREX_INST__(3,0);			\
	asm("cmp r3, r12 ");			\
	asm("bne 2f ");					\
	__STREX_INST__(3,2,0);			\
	asm("cmp r3, #0 ");				\
	asm("bne 1b ");					\
	asm("2: ");						\
	__STR_INST__(ne, "r3, [r1] ");	\
	asm("movne r0, #0 ");			\
	asm("moveq r0, #1 ");

       
extern "C" EXPORT_C __NAKED__ TBool __fname__(__OPERATION__,rel,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ * /*q*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=q, R2=v
	// return value in R0
#ifdef __BARRIERS_NEEDED__				// If no barriers, all ordering variants collapse to same function
	__LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
	}

extern "C" EXPORT_C __NAKED__ TBool __fname__(__OPERATION__,rlx,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ * /*q*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=q, R2=v
	// return value in R0
#ifdef __BARRIERS_NEEDED__				// If no barriers, all ordering variants collapse to same function
	__DO_CAS_OP__
	__JUMP(,lr);
#endif
	}

extern "C" EXPORT_C __NAKED__ TBool __fname__(__OPERATION__,ord,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ * /*q*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=q, R2=v
	// return value in R0
#ifdef __BARRIERS_NEEDED__				// If no barriers, all ordering variants collapse to same function
	__LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
	}

extern "C" EXPORT_C __NAKED__ TBool __fname__(__OPERATION__,acq,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ * /*q*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=q, R2=v
	// return value in R0
	__DO_CAS_OP__
	__LOCAL_DATA_MEMORY_BARRIER__(r3);
	__JUMP(,lr);
	}

#undef __DO_CAS_OP__

       
#elif defined(__OP_AXO__)
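// Atomic and-xor: *a = (*a & u) ^ v, returning the old value. Suitable u and
// v give clear/set/toggle-mask combinations in one atomic step. R4 is used as
// scratch and so is saved and restored around the loop.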
       
#define	__SAVE_REGS__		asm("str r4, [sp, #-4]! ");
#define	__RESTORE_REGS__	asm("ldr r4, [sp], #4 ");

#define __DO_AXO_OP__				\
	asm("mov r12, r0 ");			\
	asm("1: ");						\
	__LDREX_INST__(0,12);			\
	asm("and r4, r0, r1 ");			\
	asm("eor r4, r4, r2 ");			\
	__STREX_INST__(3,4,12);			\
	asm("cmp r3, #0 ");				\
	asm("bne 1b ");

       
extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rel,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=u, R2=v
	// return value in R0
#ifdef __BARRIERS_NEEDED__				// If no barriers, all ordering variants collapse to same function
	__LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
	}

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rlx,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=u, R2=v
	// return value in R0
#ifdef __BARRIERS_NEEDED__				// If no barriers, all ordering variants collapse to same function
	__SAVE_REGS__
	__DO_AXO_OP__
	__RESTORE_REGS__
	__JUMP(,lr);
#endif
	}

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,ord,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=u, R2=v
	// return value in R0
#ifdef __BARRIERS_NEEDED__				// If no barriers, all ordering variants collapse to same function
	__LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
	}

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,acq,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=u, R2=v
	// return value in R0
	__SAVE_REGS__
	__DO_AXO_OP__
	__LOCAL_DATA_MEMORY_BARRIER__(r3);
	__RESTORE_REGS__
	__JUMP(,lr);
	}

#undef __SAVE_REGS__
#undef __RESTORE_REGS__
#undef __DO_AXO_OP__

       
#elif defined(__OP_RMW3__)
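// Threshold add: if the old value of *a is >= the threshold t, add u to it,
// otherwise add v; the old value is returned. __OP_TAU__ compares unsigned
// (cs/cc), __OP_TAS__ sign-extends sub-word values and compares signed (ge/lt).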
       
#define	__SAVE_REGS__		asm("stmfd sp!, {r4-r5} ");
#define	__RESTORE_REGS__	asm("ldmfd sp!, {r4-r5} ");

#if	defined(__OP_TAU__)
#define	__COND_GE__		"cs"
#define	__COND_LT__		"cc"
#define	__DO_SIGN_EXTEND__
#elif	defined(__OP_TAS__)
#define	__COND_GE__		"ge"
#define	__COND_LT__		"lt"
#define	__DO_SIGN_EXTEND__	__SIGN_EXTEND__(r0)
#endif

#define __DO_RMW3_OP__				\
	asm("mov r12, r0 ");			\
	asm("1: ");						\
	__LDREX_INST__(0,12);			\
	__DO_SIGN_EXTEND__				\
	asm("cmp r0, r1 ");				\
	asm("add" __COND_GE__ " r4, r0, r2 ");	\
	asm("add" __COND_LT__ " r4, r0, r3 ");	\
	__STREX_INST__(5,4,12);			\
	asm("cmp r5, #0 ");				\
	asm("bne 1b ");

       
extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rel,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*t*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=t, R2=u, R3=v
	// return value in R0
#ifdef __BARRIERS_NEEDED__				// If no barriers, all ordering variants collapse to same function
	__LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
	}

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,rlx,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*t*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=t, R2=u, R3=v
	// return value in R0
#ifdef __BARRIERS_NEEDED__				// If no barriers, all ordering variants collapse to same function
	__SAVE_REGS__
	__DO_RMW3_OP__
	__RESTORE_REGS__
	__JUMP(,lr);
#endif
	}

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,ord,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*t*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=t, R2=u, R3=v
	// return value in R0
#ifdef __BARRIERS_NEEDED__				// If no barriers, all ordering variants collapse to same function
	__LOCAL_DATA_MEMORY_BARRIER_Z__(r12);
#endif
	}

extern "C" EXPORT_C __NAKED__ __TYPE__ __fname__(__OPERATION__,acq,__DATA_SIZE__)(volatile TAny* /*a*/, __TYPE__ /*t*/, __TYPE__ /*u*/, __TYPE__ /*v*/)
	{
	// R0=a, R1=t, R2=u, R3=v
	// return value in R0
	__SAVE_REGS__
	__DO_RMW3_OP__
	__LOCAL_DATA_MEMORY_BARRIER__(r5);
	__RESTORE_REGS__
	__JUMP(,lr);
	}

#undef __SAVE_REGS__
#undef __RESTORE_REGS__
#undef __DO_RMW3_OP__
#undef __COND_GE__
#undef __COND_LT__
#undef __DO_SIGN_EXTEND__

#endif

// Second inclusion undefines temporaries
#include "atomic_ops.h"