kernel/eka/include/e32atomics.h
/*
* Copyright (c) 2007-2009 Nokia Corporation and/or its subsidiary(-ies).
* All rights reserved.
* This component and the accompanying materials are made available
* under the terms of the License "Eclipse Public License v1.0"
* which accompanies this distribution, and is available
* at the URL "http://www.eclipse.org/legal/epl-v10.html".
*
* Initial Contributors:
* Nokia Corporation - initial contribution.
*
* Contributors:
*
* Description:
* e32/include/e32atomics.h
*
*/


#ifndef __E32ATOMICS_H__
#define __E32ATOMICS_H__
#include <e32def.h>

/**	@file e32atomics.h
	@publishedAll
	@prototype
*/

/*
Versions needed:
	WINS/WINSCW		Use X86 locked operations. Assume Pentium or above CPU (CMPXCHG8B available)
	X86				For Pentium and above use locked operations
					For 486 use locked operations for 8, 16, 32 bit. For 64 bit must disable interrupts.
					NOTE: 486 not supported at the moment
	ARMv4/ARMv5		Must disable interrupts.
	ARMv6			LDREX/STREX for 8, 16, 32 bit. For 64 bit must disable interrupts (maybe).
	ARMv6K/ARMv7	LDREXB/LDREXH/LDREX/LDREXD
*/

#ifdef __cplusplus
extern "C" {
#endif

IMPORT_C void		__e32_memory_barrier();												/* Barrier guaranteeing ordering of memory accesses */
IMPORT_C void		__e32_io_completion_barrier();										/* Barrier guaranteeing ordering and completion of memory accesses */
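
/*
 Usage sketch (illustrative only, not part of the exported API): one way the
 explicit barrier can order an ordinary payload write before a flag write,
 with the reader pairing it with an acquire load. SharedData, gData, gReady
 and the function names are hypothetical.
*/
#if 0
struct SharedData { TInt iValue; };
static SharedData gData;
static volatile TUint32 gReady = 0;

void PublishData(TInt aValue)
	{
	gData.iValue = aValue;					// ordinary write of the payload
	__e32_memory_barrier();					// order the payload write before the flag write
	gReady = 1;								// flag becomes visible only after the payload
	}

TBool TryConsumeData(TInt* aValue)
	{
	if (__e32_atomic_load_acq32(&gReady))	// acquire load: payload reads cannot move above this
		{
		*aValue = gData.iValue;
		return ETrue;
		}
	return EFalse;
	}
#endif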
       
    49 
       
    50 /* Atomic operations on 8 bit quantities */
       
    51 IMPORT_C TUint8		__e32_atomic_load_acq8(const volatile TAny* a);						/* read 8 bit acquire semantics */
       
    52 IMPORT_C TUint8		__e32_atomic_store_rel8(volatile TAny* a, TUint8 v);				/* write 8 bit, return v, release semantics */
       
    53 IMPORT_C TUint8		__e32_atomic_store_ord8(volatile TAny* a, TUint8 v);				/* write 8 bit, return v, full fence */
       
    54 IMPORT_C TUint8		__e32_atomic_swp_rlx8(volatile TAny* a, TUint8 v);					/* write 8 bit, return original, relaxed */
       
    55 IMPORT_C TUint8		__e32_atomic_swp_acq8(volatile TAny* a, TUint8 v);					/* write 8 bit, return original, acquire */
       
    56 IMPORT_C TUint8		__e32_atomic_swp_rel8(volatile TAny* a, TUint8 v);					/* write 8 bit, return original, release */
       
    57 IMPORT_C TUint8		__e32_atomic_swp_ord8(volatile TAny* a, TUint8 v);					/* write 8 bit, return original, full fence */
       
    58 IMPORT_C TBool		__e32_atomic_cas_rlx8(volatile TAny* a, TUint8* q, TUint8 v);		/* if (*a==*q) {*a=v; return TRUE;} else {*q=*a; return FALSE;} */
       
    59 IMPORT_C TBool		__e32_atomic_cas_acq8(volatile TAny* a, TUint8* q, TUint8 v);
       
    60 IMPORT_C TBool		__e32_atomic_cas_rel8(volatile TAny* a, TUint8* q, TUint8 v);
       
    61 IMPORT_C TBool		__e32_atomic_cas_ord8(volatile TAny* a, TUint8* q, TUint8 v);
       
    62 IMPORT_C TUint8		__e32_atomic_add_rlx8(volatile TAny* a, TUint8 v);					/* *a += v; return original *a; */
       
    63 IMPORT_C TUint8		__e32_atomic_add_acq8(volatile TAny* a, TUint8 v);
       
    64 IMPORT_C TUint8		__e32_atomic_add_rel8(volatile TAny* a, TUint8 v);
       
    65 IMPORT_C TUint8		__e32_atomic_add_ord8(volatile TAny* a, TUint8 v);
       
    66 IMPORT_C TUint8		__e32_atomic_and_rlx8(volatile TAny* a, TUint8 v);					/* *a &= v; return original *a; */
       
    67 IMPORT_C TUint8		__e32_atomic_and_acq8(volatile TAny* a, TUint8 v);
       
    68 IMPORT_C TUint8		__e32_atomic_and_rel8(volatile TAny* a, TUint8 v);
       
    69 IMPORT_C TUint8		__e32_atomic_and_ord8(volatile TAny* a, TUint8 v);
       
    70 IMPORT_C TUint8		__e32_atomic_ior_rlx8(volatile TAny* a, TUint8 v);					/* *a |= v; return original *a; */
       
    71 IMPORT_C TUint8		__e32_atomic_ior_acq8(volatile TAny* a, TUint8 v);
       
    72 IMPORT_C TUint8		__e32_atomic_ior_rel8(volatile TAny* a, TUint8 v);
       
    73 IMPORT_C TUint8		__e32_atomic_ior_ord8(volatile TAny* a, TUint8 v);
       
    74 IMPORT_C TUint8		__e32_atomic_xor_rlx8(volatile TAny* a, TUint8 v);					/* *a ^= v; return original *a; */
       
    75 IMPORT_C TUint8		__e32_atomic_xor_acq8(volatile TAny* a, TUint8 v);
       
    76 IMPORT_C TUint8		__e32_atomic_xor_rel8(volatile TAny* a, TUint8 v);
       
    77 IMPORT_C TUint8		__e32_atomic_xor_ord8(volatile TAny* a, TUint8 v);
       
    78 IMPORT_C TUint8		__e32_atomic_axo_rlx8(volatile TAny* a, TUint8 u, TUint8 v);		/* *a = (*a & u) ^ v; return original *a; */
       
    79 IMPORT_C TUint8		__e32_atomic_axo_acq8(volatile TAny* a, TUint8 u, TUint8 v);
       
    80 IMPORT_C TUint8		__e32_atomic_axo_rel8(volatile TAny* a, TUint8 u, TUint8 v);
       
    81 IMPORT_C TUint8		__e32_atomic_axo_ord8(volatile TAny* a, TUint8 u, TUint8 v);
       
    82 IMPORT_C TUint8		__e32_atomic_tau_rlx8(volatile TAny* a, TUint8 t, TUint8 u, TUint8 v);	/* if (*a>=t) *a+=u else *a+=v; return original *a; */
       
    83 IMPORT_C TUint8		__e32_atomic_tau_acq8(volatile TAny* a, TUint8 t, TUint8 u, TUint8 v);
       
    84 IMPORT_C TUint8		__e32_atomic_tau_rel8(volatile TAny* a, TUint8 t, TUint8 u, TUint8 v);
       
    85 IMPORT_C TUint8		__e32_atomic_tau_ord8(volatile TAny* a, TUint8 t, TUint8 u, TUint8 v);
       
    86 IMPORT_C TInt8		__e32_atomic_tas_rlx8(volatile TAny* a, TInt8 t, TInt8 u, TInt8 v);	/* if (*a>=t) *a+=u else *a+=v; return original *a; */
       
    87 IMPORT_C TInt8		__e32_atomic_tas_acq8(volatile TAny* a, TInt8 t, TInt8 u, TInt8 v);
       
    88 IMPORT_C TInt8		__e32_atomic_tas_rel8(volatile TAny* a, TInt8 t, TInt8 u, TInt8 v);
       
    89 IMPORT_C TInt8		__e32_atomic_tas_ord8(volatile TAny* a, TInt8 t, TInt8 u, TInt8 v);
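
/*
 Usage sketch (illustrative only, not part of the exported API): the CAS
 contract above copies the current value back into *q on failure, so a retry
 loop never needs a separate reload. SaturatingIncrement8 is a hypothetical
 name.
*/
#if 0
TUint8 SaturatingIncrement8(volatile TUint8* aCount)
	{
	TUint8 old = __e32_atomic_load_acq8(aCount);
	do	{
		if (old == 0xFFu)
			break;							// already saturated, leave unchanged
		} while (!__e32_atomic_cas_rlx8(aCount, &old, (TUint8)(old + 1u)));
	return old;								// value observed before any update
	}
#endif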
       

/* Atomic operations on 16 bit quantities */
IMPORT_C TUint16	__e32_atomic_load_acq16(const volatile TAny* a);					/* read 16 bit acquire semantics */
IMPORT_C TUint16	__e32_atomic_store_rel16(volatile TAny* a, TUint16 v);				/* write 16 bit, return v, release semantics */
IMPORT_C TUint16	__e32_atomic_store_ord16(volatile TAny* a, TUint16 v);				/* write 16 bit, return v, full fence */
IMPORT_C TUint16	__e32_atomic_swp_rlx16(volatile TAny* a, TUint16 v);				/* write 16 bit, return original, relaxed */
IMPORT_C TUint16	__e32_atomic_swp_acq16(volatile TAny* a, TUint16 v);				/* write 16 bit, return original, acquire */
IMPORT_C TUint16	__e32_atomic_swp_rel16(volatile TAny* a, TUint16 v);				/* write 16 bit, return original, release */
IMPORT_C TUint16	__e32_atomic_swp_ord16(volatile TAny* a, TUint16 v);				/* write 16 bit, return original, full fence */
IMPORT_C TBool		__e32_atomic_cas_rlx16(volatile TAny* a, TUint16* q, TUint16 v);	/* if (*a==*q) {*a=v; return TRUE;} else {*q=*a; return FALSE;} */
IMPORT_C TBool		__e32_atomic_cas_acq16(volatile TAny* a, TUint16* q, TUint16 v);
IMPORT_C TBool		__e32_atomic_cas_rel16(volatile TAny* a, TUint16* q, TUint16 v);
IMPORT_C TBool		__e32_atomic_cas_ord16(volatile TAny* a, TUint16* q, TUint16 v);
IMPORT_C TUint16	__e32_atomic_add_rlx16(volatile TAny* a, TUint16 v);				/* *a += v; return original *a; */
IMPORT_C TUint16	__e32_atomic_add_acq16(volatile TAny* a, TUint16 v);
IMPORT_C TUint16	__e32_atomic_add_rel16(volatile TAny* a, TUint16 v);
IMPORT_C TUint16	__e32_atomic_add_ord16(volatile TAny* a, TUint16 v);
IMPORT_C TUint16	__e32_atomic_and_rlx16(volatile TAny* a, TUint16 v);				/* *a &= v; return original *a; */
IMPORT_C TUint16	__e32_atomic_and_acq16(volatile TAny* a, TUint16 v);
IMPORT_C TUint16	__e32_atomic_and_rel16(volatile TAny* a, TUint16 v);
IMPORT_C TUint16	__e32_atomic_and_ord16(volatile TAny* a, TUint16 v);
IMPORT_C TUint16	__e32_atomic_ior_rlx16(volatile TAny* a, TUint16 v);				/* *a |= v; return original *a; */
IMPORT_C TUint16	__e32_atomic_ior_acq16(volatile TAny* a, TUint16 v);
IMPORT_C TUint16	__e32_atomic_ior_rel16(volatile TAny* a, TUint16 v);
IMPORT_C TUint16	__e32_atomic_ior_ord16(volatile TAny* a, TUint16 v);
IMPORT_C TUint16	__e32_atomic_xor_rlx16(volatile TAny* a, TUint16 v);				/* *a ^= v; return original *a; */
IMPORT_C TUint16	__e32_atomic_xor_acq16(volatile TAny* a, TUint16 v);
IMPORT_C TUint16	__e32_atomic_xor_rel16(volatile TAny* a, TUint16 v);
IMPORT_C TUint16	__e32_atomic_xor_ord16(volatile TAny* a, TUint16 v);
IMPORT_C TUint16	__e32_atomic_axo_rlx16(volatile TAny* a, TUint16 u, TUint16 v);		/* *a = (*a & u) ^ v; return original *a; */
IMPORT_C TUint16	__e32_atomic_axo_acq16(volatile TAny* a, TUint16 u, TUint16 v);
IMPORT_C TUint16	__e32_atomic_axo_rel16(volatile TAny* a, TUint16 u, TUint16 v);
IMPORT_C TUint16	__e32_atomic_axo_ord16(volatile TAny* a, TUint16 u, TUint16 v);
IMPORT_C TUint16	__e32_atomic_tau_rlx16(volatile TAny* a, TUint16 t, TUint16 u, TUint16 v);	/* if (*a>=t) *a+=u else *a+=v; return original *a; */
IMPORT_C TUint16	__e32_atomic_tau_acq16(volatile TAny* a, TUint16 t, TUint16 u, TUint16 v);
IMPORT_C TUint16	__e32_atomic_tau_rel16(volatile TAny* a, TUint16 t, TUint16 u, TUint16 v);
IMPORT_C TUint16	__e32_atomic_tau_ord16(volatile TAny* a, TUint16 t, TUint16 u, TUint16 v);
IMPORT_C TInt16		__e32_atomic_tas_rlx16(volatile TAny* a, TInt16 t, TInt16 u, TInt16 v);	/* if (*a>=t) *a+=u else *a+=v; return original *a; (signed comparison) */
IMPORT_C TInt16		__e32_atomic_tas_acq16(volatile TAny* a, TInt16 t, TInt16 u, TInt16 v);
IMPORT_C TInt16		__e32_atomic_tas_rel16(volatile TAny* a, TInt16 t, TInt16 u, TInt16 v);
IMPORT_C TInt16		__e32_atomic_tas_ord16(volatile TAny* a, TInt16 t, TInt16 u, TInt16 v);
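
/*
 Usage sketch (illustrative only, not part of the exported API): the
 "threshold-add" (tau) operation adds u when the current value is >= t and
 v otherwise, returning the original value. Here it acts as a try-acquire on
 a token count; the decrement is expressed as the unsigned wrap-around add
 of 0xFFFF. TryTakeToken16 is a hypothetical name.
*/
#if 0
TBool TryTakeToken16(volatile TUint16* aTokens)
	{
	// if (*aTokens >= 1) *aTokens += 0xFFFF (i.e. -1); else *aTokens += 0
	TUint16 before = __e32_atomic_tau_acq16(aTokens, 1, 0xFFFF, 0);
	return before != 0;						// nonzero means a token was taken
	}
#endif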
       

/* Atomic operations on 32 bit quantities */
IMPORT_C TUint32	__e32_atomic_load_acq32(const volatile TAny* a);					/* read 32 bit acquire semantics */
IMPORT_C TUint32	__e32_atomic_store_rel32(volatile TAny* a, TUint32 v);				/* write 32 bit, return v, release semantics */
IMPORT_C TUint32	__e32_atomic_store_ord32(volatile TAny* a, TUint32 v);				/* write 32 bit, return v, full fence */
IMPORT_C TUint32	__e32_atomic_swp_rlx32(volatile TAny* a, TUint32 v);				/* write 32 bit, return original, relaxed */
IMPORT_C TUint32	__e32_atomic_swp_acq32(volatile TAny* a, TUint32 v);				/* write 32 bit, return original, acquire */
IMPORT_C TUint32	__e32_atomic_swp_rel32(volatile TAny* a, TUint32 v);				/* write 32 bit, return original, release */
IMPORT_C TUint32	__e32_atomic_swp_ord32(volatile TAny* a, TUint32 v);				/* write 32 bit, return original, full fence */
IMPORT_C TBool		__e32_atomic_cas_rlx32(volatile TAny* a, TUint32* q, TUint32 v);	/* if (*a==*q) {*a=v; return TRUE;} else {*q=*a; return FALSE;} */
IMPORT_C TBool		__e32_atomic_cas_acq32(volatile TAny* a, TUint32* q, TUint32 v);
IMPORT_C TBool		__e32_atomic_cas_rel32(volatile TAny* a, TUint32* q, TUint32 v);
IMPORT_C TBool		__e32_atomic_cas_ord32(volatile TAny* a, TUint32* q, TUint32 v);
IMPORT_C TUint32	__e32_atomic_add_rlx32(volatile TAny* a, TUint32 v);				/* *a += v; return original *a; */
IMPORT_C TUint32	__e32_atomic_add_acq32(volatile TAny* a, TUint32 v);
IMPORT_C TUint32	__e32_atomic_add_rel32(volatile TAny* a, TUint32 v);
IMPORT_C TUint32	__e32_atomic_add_ord32(volatile TAny* a, TUint32 v);
IMPORT_C TUint32	__e32_atomic_and_rlx32(volatile TAny* a, TUint32 v);				/* *a &= v; return original *a; */
IMPORT_C TUint32	__e32_atomic_and_acq32(volatile TAny* a, TUint32 v);
IMPORT_C TUint32	__e32_atomic_and_rel32(volatile TAny* a, TUint32 v);
IMPORT_C TUint32	__e32_atomic_and_ord32(volatile TAny* a, TUint32 v);
IMPORT_C TUint32	__e32_atomic_ior_rlx32(volatile TAny* a, TUint32 v);				/* *a |= v; return original *a; */
IMPORT_C TUint32	__e32_atomic_ior_acq32(volatile TAny* a, TUint32 v);
IMPORT_C TUint32	__e32_atomic_ior_rel32(volatile TAny* a, TUint32 v);
IMPORT_C TUint32	__e32_atomic_ior_ord32(volatile TAny* a, TUint32 v);
IMPORT_C TUint32	__e32_atomic_xor_rlx32(volatile TAny* a, TUint32 v);				/* *a ^= v; return original *a; */
IMPORT_C TUint32	__e32_atomic_xor_acq32(volatile TAny* a, TUint32 v);
IMPORT_C TUint32	__e32_atomic_xor_rel32(volatile TAny* a, TUint32 v);
IMPORT_C TUint32	__e32_atomic_xor_ord32(volatile TAny* a, TUint32 v);
IMPORT_C TUint32	__e32_atomic_axo_rlx32(volatile TAny* a, TUint32 u, TUint32 v);		/* *a = (*a & u) ^ v; return original *a; */
IMPORT_C TUint32	__e32_atomic_axo_acq32(volatile TAny* a, TUint32 u, TUint32 v);
IMPORT_C TUint32	__e32_atomic_axo_rel32(volatile TAny* a, TUint32 u, TUint32 v);
IMPORT_C TUint32	__e32_atomic_axo_ord32(volatile TAny* a, TUint32 u, TUint32 v);
IMPORT_C TUint32	__e32_atomic_tau_rlx32(volatile TAny* a, TUint32 t, TUint32 u, TUint32 v);	/* if (*a>=t) *a+=u else *a+=v; return original *a; */
IMPORT_C TUint32	__e32_atomic_tau_acq32(volatile TAny* a, TUint32 t, TUint32 u, TUint32 v);
IMPORT_C TUint32	__e32_atomic_tau_rel32(volatile TAny* a, TUint32 t, TUint32 u, TUint32 v);
IMPORT_C TUint32	__e32_atomic_tau_ord32(volatile TAny* a, TUint32 t, TUint32 u, TUint32 v);
IMPORT_C TInt32		__e32_atomic_tas_rlx32(volatile TAny* a, TInt32 t, TInt32 u, TInt32 v);	/* if (*a>=t) *a+=u else *a+=v; return original *a; (signed comparison) */
IMPORT_C TInt32		__e32_atomic_tas_acq32(volatile TAny* a, TInt32 t, TInt32 u, TInt32 v);
IMPORT_C TInt32		__e32_atomic_tas_rel32(volatile TAny* a, TInt32 t, TInt32 u, TInt32 v);
IMPORT_C TInt32		__e32_atomic_tas_ord32(volatile TAny* a, TInt32 t, TInt32 u, TInt32 v);
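
/*
 Usage sketch (illustrative only, not part of the exported API): because the
 add operations return the value before the addition, a fetch-and-add can
 hand out unique sequence numbers. gNextId and AllocateId are hypothetical
 names.
*/
#if 0
static volatile TUint32 gNextId = 0;

TUint32 AllocateId()
	{
	// relaxed ordering is enough when the result is only used as a unique tag
	return __e32_atomic_add_rlx32(&gNextId, 1);
	}
#endif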
       

/* Atomic operations on 64 bit quantities */
IMPORT_C TUint64	__e32_atomic_load_acq64(const volatile TAny* a);					/* read 64 bit acquire semantics */
IMPORT_C TUint64	__e32_atomic_store_rel64(volatile TAny* a, TUint64 v);				/* write 64 bit, return v, release semantics */
IMPORT_C TUint64	__e32_atomic_store_ord64(volatile TAny* a, TUint64 v);				/* write 64 bit, return v, full fence */
IMPORT_C TUint64	__e32_atomic_swp_rlx64(volatile TAny* a, TUint64 v);				/* write 64 bit, return original, relaxed */
IMPORT_C TUint64	__e32_atomic_swp_acq64(volatile TAny* a, TUint64 v);				/* write 64 bit, return original, acquire */
IMPORT_C TUint64	__e32_atomic_swp_rel64(volatile TAny* a, TUint64 v);				/* write 64 bit, return original, release */
IMPORT_C TUint64	__e32_atomic_swp_ord64(volatile TAny* a, TUint64 v);				/* write 64 bit, return original, full fence */
IMPORT_C TBool		__e32_atomic_cas_rlx64(volatile TAny* a, TUint64* q, TUint64 v);	/* if (*a==*q) {*a=v; return TRUE;} else {*q=*a; return FALSE;} */
IMPORT_C TBool		__e32_atomic_cas_acq64(volatile TAny* a, TUint64* q, TUint64 v);
IMPORT_C TBool		__e32_atomic_cas_rel64(volatile TAny* a, TUint64* q, TUint64 v);
IMPORT_C TBool		__e32_atomic_cas_ord64(volatile TAny* a, TUint64* q, TUint64 v);
IMPORT_C TUint64	__e32_atomic_add_rlx64(volatile TAny* a, TUint64 v);				/* *a += v; return original *a; */
IMPORT_C TUint64	__e32_atomic_add_acq64(volatile TAny* a, TUint64 v);
IMPORT_C TUint64	__e32_atomic_add_rel64(volatile TAny* a, TUint64 v);
IMPORT_C TUint64	__e32_atomic_add_ord64(volatile TAny* a, TUint64 v);
IMPORT_C TUint64	__e32_atomic_and_rlx64(volatile TAny* a, TUint64 v);				/* *a &= v; return original *a; */
IMPORT_C TUint64	__e32_atomic_and_acq64(volatile TAny* a, TUint64 v);
IMPORT_C TUint64	__e32_atomic_and_rel64(volatile TAny* a, TUint64 v);
IMPORT_C TUint64	__e32_atomic_and_ord64(volatile TAny* a, TUint64 v);
IMPORT_C TUint64	__e32_atomic_ior_rlx64(volatile TAny* a, TUint64 v);				/* *a |= v; return original *a; */
IMPORT_C TUint64	__e32_atomic_ior_acq64(volatile TAny* a, TUint64 v);
IMPORT_C TUint64	__e32_atomic_ior_rel64(volatile TAny* a, TUint64 v);
IMPORT_C TUint64	__e32_atomic_ior_ord64(volatile TAny* a, TUint64 v);
IMPORT_C TUint64	__e32_atomic_xor_rlx64(volatile TAny* a, TUint64 v);				/* *a ^= v; return original *a; */
IMPORT_C TUint64	__e32_atomic_xor_acq64(volatile TAny* a, TUint64 v);
IMPORT_C TUint64	__e32_atomic_xor_rel64(volatile TAny* a, TUint64 v);
IMPORT_C TUint64	__e32_atomic_xor_ord64(volatile TAny* a, TUint64 v);
IMPORT_C TUint64	__e32_atomic_axo_rlx64(volatile TAny* a, TUint64 u, TUint64 v);		/* *a = (*a & u) ^ v; return original *a; */
IMPORT_C TUint64	__e32_atomic_axo_acq64(volatile TAny* a, TUint64 u, TUint64 v);
IMPORT_C TUint64	__e32_atomic_axo_rel64(volatile TAny* a, TUint64 u, TUint64 v);
IMPORT_C TUint64	__e32_atomic_axo_ord64(volatile TAny* a, TUint64 u, TUint64 v);
IMPORT_C TUint64	__e32_atomic_tau_rlx64(volatile TAny* a, TUint64 t, TUint64 u, TUint64 v);	/* if (*a>=t) *a+=u else *a+=v; return original *a; */
IMPORT_C TUint64	__e32_atomic_tau_acq64(volatile TAny* a, TUint64 t, TUint64 u, TUint64 v);
IMPORT_C TUint64	__e32_atomic_tau_rel64(volatile TAny* a, TUint64 t, TUint64 u, TUint64 v);
IMPORT_C TUint64	__e32_atomic_tau_ord64(volatile TAny* a, TUint64 t, TUint64 u, TUint64 v);
IMPORT_C TInt64		__e32_atomic_tas_rlx64(volatile TAny* a, TInt64 t, TInt64 u, TInt64 v);	/* if (*a>=t) *a+=u else *a+=v; return original *a; (signed comparison) */
IMPORT_C TInt64		__e32_atomic_tas_acq64(volatile TAny* a, TInt64 t, TInt64 u, TInt64 v);
IMPORT_C TInt64		__e32_atomic_tas_rel64(volatile TAny* a, TInt64 t, TInt64 u, TInt64 v);
IMPORT_C TInt64		__e32_atomic_tas_ord64(volatile TAny* a, TInt64 t, TInt64 u, TInt64 v);
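
/*
 Usage sketch (illustrative only, not part of the exported API): a 64-bit
 statistics counter updated from several threads. Per the version table at
 the top of this file, 64-bit atomics may be implemented by disabling
 interrupts on older CPUs, but the call sites look the same. Names are
 hypothetical.
*/
#if 0
static volatile TUint64 gBytesTransferred = 0;

void AddBytes(TUint32 aCount)
	{
	__e32_atomic_add_rlx64(&gBytesTransferred, aCount);
	}

TUint64 BytesTransferred()
	{
	return __e32_atomic_load_acq64(&gBytesTransferred);	// atomic 64-bit snapshot
	}
#endif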
       
   213 
       
   214 /*	Atomic operations on pointers
       
   215 	These are implemented as macro definitions over the 32 or 64 bit operations
       
   216 */
       
   217 /*	IMPORT_C TAny*		__e32_atomic_load_acq_ptr(const volatile TAny* a);												*/
       
   218 #define	__e32_atomic_load_acq_ptr(a)		((TAny*)__e32_atomic_load_acq32(a))
       
   219 /*	IMPORT_C TAny*		__e32_atomic_store_rel_ptr(volatile TAny* a, const volatile TAny* v);							*/
       
   220 #define	__e32_atomic_store_rel_ptr(a,v)		((TAny*)__e32_atomic_store_rel32(a,(T_UintPtr)(v)))
       
   221 /*	IMPORT_C TAny*		__e32_atomic_store_ord_ptr(volatile TAny* a, const volatile TAny* v);							*/
       
   222 #define	__e32_atomic_store_ord_ptr(a,v)		((TAny*)__e32_atomic_store_ord32(a,(T_UintPtr)(v)))
       
   223 /*	IMPORT_C TAny*		__e32_atomic_swp_rlx_ptr(volatile TAny* a, const volatile TAny* v);								*/
       
   224 #define	__e32_atomic_swp_rlx_ptr(a,v)		((TAny*)__e32_atomic_swp_rlx32(a,(T_UintPtr)(v)))
       
   225 /*	IMPORT_C TAny*		__e32_atomic_swp_acq_ptr(volatile TAny* a, const volatile TAny* v);								*/
       
   226 #define	__e32_atomic_swp_acq_ptr(a,v)		((TAny*)__e32_atomic_swp_acq32(a,(T_UintPtr)(v)))
       
   227 /*	IMPORT_C TAny*		__e32_atomic_swp_rel_ptr(volatile TAny* a, const volatile TAny* v);								*/
       
   228 #define	__e32_atomic_swp_rel_ptr(a,v)		((TAny*)__e32_atomic_swp_rel32(a,(T_UintPtr)(v)))
       
   229 /*	IMPORT_C TAny*		__e32_atomic_swp_ord_ptr(volatile TAny* a, const volatile TAny* v);								*/
       
   230 #define	__e32_atomic_swp_ord_ptr(a,v)		((TAny*)__e32_atomic_swp_ord32(a,(T_UintPtr)(v)))
       
   231 /*	IMPORT_C TBool		__e32_atomic_cas_rlx_ptr(volatile TAny* a, const volatile TAny** q, const volatile TAny* v);	*/
       
   232 #define	__e32_atomic_cas_rlx_ptr(a,q,v)		(__e32_atomic_cas_rlx32(a,(T_UintPtr*)(q),(T_UintPtr)(v)))
       
   233 /*	IMPORT_C TBool		__e32_atomic_cas_acq_ptr(volatile TAny* a, const volatile TAny** q, const volatile TAny* v);	*/
       
   234 #define	__e32_atomic_cas_acq_ptr(a,q,v)		(__e32_atomic_cas_acq32(a,(T_UintPtr*)(q),(T_UintPtr)(v)))
       
   235 /*	IMPORT_C TBool		__e32_atomic_cas_rel_ptr(volatile TAny* a, const volatile TAny** q, const volatile TAny* v);	*/
       
   236 #define	__e32_atomic_cas_rel_ptr(a,q,v)		(__e32_atomic_cas_rel32(a,(T_UintPtr*)(q),(T_UintPtr)(v)))
       
   237 /*	IMPORT_C TBool		__e32_atomic_cas_ord_ptr(volatile TAny* a, const volatile TAny** q, const volatile TAny* v);	*/
       
   238 #define	__e32_atomic_cas_ord_ptr(a,q,v)		(__e32_atomic_cas_ord32(a,(T_UintPtr*)(q),(T_UintPtr)(v)))
       
   239 /*	IMPORT_C TAny*		__e32_atomic_add_rlx_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   240 #define	__e32_atomic_add_rlx_ptr(a,v)		((TAny*)__e32_atomic_add_rlx32(a,(T_UintPtr)(v)))
       
   241 /*	IMPORT_C TAny*		__e32_atomic_add_acq_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   242 #define	__e32_atomic_add_acq_ptr(a,v)		((TAny*)__e32_atomic_add_acq32(a,(T_UintPtr)(v)))
       
   243 /*	IMPORT_C TAny*		__e32_atomic_add_rel_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   244 #define	__e32_atomic_add_rel_ptr(a,v)		((TAny*)__e32_atomic_add_rel32(a,(T_UintPtr)(v)))
       
   245 /*	IMPORT_C TAny*		__e32_atomic_add_ord_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   246 #define	__e32_atomic_add_ord_ptr(a,v)		((TAny*)__e32_atomic_add_ord32(a,(T_UintPtr)(v)))
       
   247 /*	IMPORT_C TAny*		__e32_atomic_and_rlx_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   248 #define	__e32_atomic_and_rlx_ptr(a,v)		((TAny*)__e32_atomic_and_rlx32(a,(T_UintPtr)(v)))
       
   249 /*	IMPORT_C TAny*		__e32_atomic_and_acq_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   250 #define	__e32_atomic_and_acq_ptr(a,v)		((TAny*)__e32_atomic_and_acq32(a,(T_UintPtr)(v)))
       
   251 /*	IMPORT_C TAny*		__e32_atomic_and_rel_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   252 #define	__e32_atomic_and_rel_ptr(a,v)		((TAny*)__e32_atomic_and_rel32(a,(T_UintPtr)(v)))
       
   253 /*	IMPORT_C TAny*		__e32_atomic_and_ord_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   254 #define	__e32_atomic_and_ord_ptr(a,v)		((TAny*)__e32_atomic_and_ord32(a,(T_UintPtr)(v)))
       
   255 /*	IMPORT_C TAny*		__e32_atomic_ior_rlx_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   256 #define	__e32_atomic_ior_rlx_ptr(a,v)		((TAny*)__e32_atomic_ior_rlx32(a,(T_UintPtr)(v)))
       
   257 /*	IMPORT_C TAny*		__e32_atomic_ior_acq_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   258 #define	__e32_atomic_ior_acq_ptr(a,v)		((TAny*)__e32_atomic_ior_acq32(a,(T_UintPtr)(v)))
       
   259 /*	IMPORT_C TAny*		__e32_atomic_ior_rel_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   260 #define	__e32_atomic_ior_rel_ptr(a,v)		((TAny*)__e32_atomic_ior_rel32(a,(T_UintPtr)(v)))
       
   261 /*	IMPORT_C TAny*		__e32_atomic_ior_ord_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   262 #define	__e32_atomic_ior_ord_ptr(a,v)		((TAny*)__e32_atomic_ior_ord32(a,(T_UintPtr)(v)))
       
   263 /*	IMPORT_C TAny*		__e32_atomic_xor_rlx_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   264 #define	__e32_atomic_xor_rlx_ptr(a,v)		((TAny*)__e32_atomic_xor_rlx32(a,(T_UintPtr)(v)))
       
   265 /*	IMPORT_C TAny*		__e32_atomic_xor_acq_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   266 #define	__e32_atomic_xor_acq_ptr(a,v)		((TAny*)__e32_atomic_xor_acq32(a,(T_UintPtr)(v)))
       
   267 /*	IMPORT_C TAny*		__e32_atomic_xor_rel_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   268 #define	__e32_atomic_xor_rel_ptr(a,v)		((TAny*)__e32_atomic_xor_rel32(a,(T_UintPtr)(v)))
       
   269 /*	IMPORT_C TAny*		__e32_atomic_xor_ord_ptr(volatile TAny* a, T_UintPtr v);										*/
       
   270 #define	__e32_atomic_xor_ord_ptr(a,v)		((TAny*)__e32_atomic_xor_ord32(a,(T_UintPtr)(v)))
       
   271 /*	IMPORT_C TAny*		__e32_atomic_axo_rlx_ptr(volatile TAny* a, T_UintPtr u, T_UintPtr v);							*/
       
   272 #define	__e32_atomic_axo_rlx_ptr(a,u,v)		((TAny*)__e32_atomic_axo_rlx32(a,(T_UintPtr)(u),(T_UintPtr)(v)))
       
   273 /*	IMPORT_C TAny*		__e32_atomic_axo_acq_ptr(volatile TAny* a, T_UintPtr u, T_UintPtr v);							*/
       
   274 #define	__e32_atomic_axo_acq_ptr(a,u,v)		((TAny*)__e32_atomic_axo_acq32(a,(T_UintPtr)(u),(T_UintPtr)(v)))
       
   275 /*	IMPORT_C TAny*		__e32_atomic_axo_rel_ptr(volatile TAny* a, T_UintPtr u, T_UintPtr v);							*/
       
   276 #define	__e32_atomic_axo_rel_ptr(a,u,v)		((TAny*)__e32_atomic_axo_rel32(a,(T_UintPtr)(u),(T_UintPtr)(v)))
       
   277 /*	IMPORT_C TAny*		__e32_atomic_axo_ord_ptr(volatile TAny* a, T_UintPtr u, T_UintPtr v);							*/
       
   278 #define	__e32_atomic_axo_ord_ptr(a,u,v)		((TAny*)__e32_atomic_axo_ord32(a,(T_UintPtr)(u),(T_UintPtr)(v)))
       
   279 /*	IMPORT_C TAny*		__e32_atomic_tau_rlx_ptr(volatile TAny* a, const volatile TAny* t, T_UintPtr u, T_UintPtr v);	*/
       
   280 #define	__e32_atomic_tau_rlx_ptr(a,t,u,v)	((TAny*)__e32_atomic_tau_rlx32(a,(T_UintPtr)(t),(T_UintPtr)(u),(T_UintPtr)(v)))
       
   281 /*	IMPORT_C TAny*		__e32_atomic_tau_acq_ptr(volatile TAny* a, const volatile TAny* t, T_UintPtr u, T_UintPtr v);	*/
       
   282 #define	__e32_atomic_tau_acq_ptr(a,t,u,v)	((TAny*)__e32_atomic_tau_acq32(a,(T_UintPtr)(t),(T_UintPtr)(u),(T_UintPtr)(v)))
       
   283 /*	IMPORT_C TAny*		__e32_atomic_tau_rel_ptr(volatile TAny* a, const volatile TAny* t, T_UintPtr u, T_UintPtr v);	*/
       
   284 #define	__e32_atomic_tau_rel_ptr(a,t,u,v)	((TAny*)__e32_atomic_tau_rel32(a,(T_UintPtr)(t),(T_UintPtr)(u),(T_UintPtr)(v)))
       
   285 /*	IMPORT_C TAny*		__e32_atomic_tau_ord_ptr(volatile TAny* a, const volatile TAny* t, T_UintPtr u, T_UintPtr v);	*/
       
   286 #define	__e32_atomic_tau_ord_ptr(a,t,u,v)	((TAny*)__e32_atomic_tau_ord32(a,(T_UintPtr)(t),(T_UintPtr)(u),(T_UintPtr)(v)))
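
/*
 Usage sketch (illustrative only, not part of the exported API): pushing onto
 an intrusive singly linked list with the pointer CAS macro. On failure the
 macro writes the current head back into 'head', so the loop simply relinks
 and retries; the release ordering publishes the node contents. SListNode,
 gHead and Push are hypothetical names.
*/
#if 0
struct SListNode { struct SListNode* iNext; };
static struct SListNode* volatile gHead = 0;

void Push(struct SListNode* aNode)
	{
	TAny* head = __e32_atomic_load_acq_ptr(&gHead);
	do	{
		aNode->iNext = (struct SListNode*)head;		// link to the currently observed head
		} while (!__e32_atomic_cas_rel_ptr(&gHead, &head, aNode));
	}
#endif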
       

/*	Miscellaneous utility functions
*/
IMPORT_C TInt		__e32_find_ms1_32(TUint32 v);		/* return bit number of most significant 1, -1 if argument zero */
IMPORT_C TInt		__e32_find_ls1_32(TUint32 v);		/* return bit number of least significant 1, -1 if argument zero */
IMPORT_C TInt		__e32_bit_count_32(TUint32 v);		/* return number of bits with value 1 */
IMPORT_C TInt		__e32_find_ms1_64(TUint64 v);		/* return bit number of most significant 1, -1 if argument zero */
IMPORT_C TInt		__e32_find_ls1_64(TUint64 v);		/* return bit number of least significant 1, -1 if argument zero */
IMPORT_C TInt		__e32_bit_count_64(TUint64 v);		/* return number of bits with value 1 */
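
/*
 Usage sketch (illustrative only, not part of the exported API): the
 find-most-significant-1 helper doubles as an integer log2 and can round a
 value up to the next power of two. Function names are hypothetical.
*/
#if 0
TInt Log2Floor(TUint32 aValue)
	{
	return __e32_find_ms1_32(aValue);		// -1 if aValue is zero
	}

TUint32 RoundUpToPowerOf2(TUint32 aValue)	// valid for 1 <= aValue <= 0x80000000
	{
	if (aValue <= 1)
		return 1;
	TInt msb = __e32_find_ms1_32(aValue - 1);
	return ((TUint32)1) << (msb + 1);
	}
#endif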
       

#ifdef __cplusplus
} /* extern "C" */
#endif


#endif	/* __E32ATOMICS_H__ */