/* SPDX-License-Identifier: GPL-2.0-or-later
 * Copyright (c) 2016 Cyril Hrubis <[email protected]>
 */

/* The LTP library has some of its own atomic synchronisation primitives
 * contained in this file. Generally speaking these should not be used
 * directly in tests for synchronisation; instead use tst_checkpoint.h,
 * tst_fuzzy_sync.h or the POSIX library.
 *
 * Notes on compile and runtime memory barriers and atomics.
 *
 * Within the LTP library we have three concerns when accessing variables
 * shared by multiple threads or processes:
 *
 * (1) Removal or reordering of accesses by the compiler.
 * (2) Atomicity of addition.
 * (3) LOAD-STORE ordering between threads.
 *
 * The first (1) is the most likely to cause an error if not properly
 * handled. We avoid it by using constructs which the compiler will not
 * remove or reorder during optimisation: the __atomic and __sync intrinsics,
 * volatile asm statements with a "memory" clobber and variables marked
 * volatile.
 *
 * On any platform Linux is likely to run on, a LOAD (fetch) or STORE of a
 * 32-bit integer will be atomic. However fetching and adding to a variable
 * is quite likely not; so for (2) we need to ensure we use atomic addition
 * (see the illustrative sketch below this comment).
 *
 * Finally, for tst_fuzzy_sync at least, we need to ensure that LOADs and
 * STOREs of any shared variables (including non-atomics) that are made
 * between calls to tst_fzsync_wait are completed (globally visible) before
 * tst_fzsync_wait completes. For this, runtime memory and instruction
 * barriers are required in addition to compile-time ones.
 *
 * We use full sequential ordering (__ATOMIC_SEQ_CST) for the sake of
 * simplicity. LTP tests tend to be syscall-heavy, so any gain from using a
 * weaker memory model is unlikely to be significant, while weaker models are
 * a potent source of confusion.
 *
 * Likewise, for the fallback ASM, the simplest "definitely will work, always"
 * approach is preferred over anything more performant.
 *
 * Also see Documentation/memory-barriers.txt in the kernel tree and
 * https://gcc.gnu.org/onlinedocs/gcc/_005f_005fatomic-Builtins.html;
 * note that terminology may vary between sources.
 */
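
/* For illustration only (a minimal sketch, not part of this header's API;
 * the names "counter", "worker" and "wait_for_workers" are hypothetical):
 * a plain "(*counter)++" compiles to a separate load, add and store, so
 * concurrent writers can lose updates. Using the helpers below, a counter
 * in shared memory (e.g. set up with SAFE_MMAP()) could be handled as:
 *
 *	static int *counter;
 *
 *	static void worker(void)
 *	{
 *		tst_atomic_inc(counter);	// (2) atomic addition
 *	}
 *
 *	static void wait_for_workers(int nr_workers)
 *	{
 *		// (1) + (3) the load is not optimised away or reordered
 *		while (tst_atomic_load(counter) < nr_workers)
 *			usleep(1000);
 *	}
 */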

#ifndef TST_ATOMIC_H__
#define TST_ATOMIC_H__

#include "config.h"

#if HAVE_ATOMIC_MEMORY_MODEL == 1
static inline int tst_atomic_add_return(int i, int *v)
{
	return __atomic_add_fetch(v, i, __ATOMIC_SEQ_CST);
}

static inline int tst_atomic_load(int *v)
{
	return __atomic_load_n(v, __ATOMIC_SEQ_CST);
}

static inline void tst_atomic_store(int i, int *v)
{
	__atomic_store_n(v, i, __ATOMIC_SEQ_CST);
}

#elif HAVE_SYNC_ADD_AND_FETCH == 1
static inline int tst_atomic_add_return(int i, int *v)
{
	return __sync_add_and_fetch(v, i);
}

static inline int tst_atomic_load(int *v)
{
	int ret;

	__sync_synchronize();
	ret = *v;
	__sync_synchronize();
	return ret;
}

static inline void tst_atomic_store(int i, int *v)
{
	__sync_synchronize();
	*v = i;
	__sync_synchronize();
}

#elif defined(__i386__) || defined(__x86_64__)
# define LTP_USE_GENERIC_LOAD_STORE_ASM 1

static inline int tst_atomic_add_return(int i, int *v)
{
	int __ret = i;

	/*
	 * taken from arch/x86/include/asm/cmpxchg.h
	 */
	asm volatile ("lock; xaddl %0, %1\n"
		: "+r" (__ret), "+m" (*v) : : "memory", "cc");

	return i + __ret;
}

#elif defined(__powerpc__) || defined(__powerpc64__)
static inline int tst_atomic_add_return(int i, int *v)
{
	int t;

	/* taken from arch/powerpc/include/asm/atomic.h */
	asm volatile(
		"	sync\n"
		"1:	lwarx	%0,0,%2		# atomic_add_return\n"
		"	add %0,%1,%0\n"
		"	stwcx.	%0,0,%2 \n"
		"	bne-	1b\n"
		"	sync\n"
		: "=&r" (t)
		: "r" (i), "r" (v)
		: "cc", "memory");

	return t;
}

static inline int tst_atomic_load(int *v)
{
	int ret;

	asm volatile("sync\n" : : : "memory");
	ret = *v;
	asm volatile("sync\n" : : : "memory");

	return ret;
}

static inline void tst_atomic_store(int i, int *v)
{
	asm volatile("sync\n" : : : "memory");
	*v = i;
	asm volatile("sync\n" : : : "memory");
}

#elif defined(__s390__) || defined(__s390x__)
# define LTP_USE_GENERIC_LOAD_STORE_ASM 1

static inline int tst_atomic_add_return(int i, int *v)
{
	int old_val, new_val;

	/* taken from arch/s390/include/asm/atomic.h */
	asm volatile(
		"	l	%0,%2\n"
		"0:	lr	%1,%0\n"
		"	ar	%1,%3\n"
		"	cs	%0,%1,%2\n"
		"	jl	0b"
		: "=&d" (old_val), "=&d" (new_val), "+Q" (*v)
		: "d" (i)
		: "cc", "memory");

	return old_val + i;
}

#elif defined(__arc__)

/* ARCv2 provides the dmb instruction for SMP barriers; on ARC700 only a
 * compiler barrier is used.
 */
#ifdef __ARC700__
#define smp_mb()	asm volatile("" : : : "memory")
#else
#define smp_mb()	asm volatile("dmb 3\n" : : : "memory")
#endif

static inline int tst_atomic_add_return(int i, int *v)
{
	unsigned int val;

	smp_mb();

	asm volatile(
		"1:	llock   %[val], [%[ctr]]	\n"
		"	add     %[val], %[val], %[i]	\n"
		"	scond   %[val], [%[ctr]]	\n"
		"	bnz     1b			\n"
		: [val]	"=&r"	(val)
		: [ctr]	"r"	(v),
		  [i]	"ir"	(i)
		: "cc", "memory");

	smp_mb();

	return val;
}

static inline int tst_atomic_load(int *v)
{
	int ret;

	smp_mb();
	ret = *v;
	smp_mb();

	return ret;
}

static inline void tst_atomic_store(int i, int *v)
{
	smp_mb();
	*v = i;
	smp_mb();
}

#elif defined (__aarch64__)
static inline int tst_atomic_add_return(int i, int *v)
{
	unsigned long tmp;
	int result;

	__asm__ __volatile__(
"       prfm    pstl1strm, %2	\n"
"1:     ldaxr	%w0, %2		\n"
"       add	%w0, %w0, %w3	\n"
"       stlxr	%w1, %w0, %2	\n"
"       cbnz	%w1, 1b		\n"
"       dmb ish			\n"
	: "=&r" (result), "=&r" (tmp), "+Q" (*v)
	: "Ir" (i)
	: "memory");

	return result;
}

/* We are using load and store exclusive (ldaxr & stlxr) instructions to try
 * to prevent the tst_atomic_load and, more likely, tst_atomic_store
 * functions from interfering with tst_atomic_add_return, which takes
 * advantage of exclusivity. It is not clear whether this is a good idea,
 * but it does mean that all three functions are very similar.
 */
static inline int tst_atomic_load(int *v)
{
	int ret;
	unsigned long tmp;

	asm volatile("//atomic_load			\n"
		"	prfm	pstl1strm,  %[v]	\n"
		"1:	ldaxr	%w[ret], %[v]		\n"
		"	stlxr   %w[tmp], %w[ret], %[v]  \n"
		"	cbnz    %w[tmp], 1b		\n"
		"	dmb ish				\n"
		: [tmp] "=&r" (tmp), [ret] "=&r" (ret), [v] "+Q" (*v)
		: : "memory");

	return ret;
}

static inline void tst_atomic_store(int i, int *v)
{
	unsigned long tmp;

	asm volatile("//atomic_store			\n"
		"	prfm	pstl1strm, %[v]		\n"
		"1:	ldaxr	%w[tmp], %[v]		\n"
		"	stlxr   %w[tmp], %w[i], %[v]	\n"
		"	cbnz    %w[tmp], 1b		\n"
		"	dmb ish				\n"
		: [tmp] "=&r" (tmp), [v] "+Q" (*v)
		: [i] "r" (i)
		: "memory");
}

#elif defined(__sparc__) && defined(__arch64__)
# define LTP_USE_GENERIC_LOAD_STORE_ASM 1
static inline int tst_atomic_add_return(int i, int *v)
{
	int ret, tmp;

	/* Based on arch/sparc/lib/atomic_64.S with the exponential backoff
	 * function removed because we are unlikely to have a large (>= 16?)
	 * number of cores continuously trying to update one variable.
	 */
	asm volatile("/*atomic_add_return*/		\n"
		"1:	ldsw	[%[v]], %[ret];		\n"
		"	add	%[ret], %[i], %[tmp];	\n"
		"	cas	[%[v]], %[ret], %[tmp];	\n"
		"	cmp	%[ret], %[tmp];		\n"
		"	bne,pn	%%icc, 1b;		\n"
		"	nop;				\n"
		"	add	%[ret], %[i], %[ret];	\n"
		: [ret] "=r&" (ret), [tmp] "=r&" (tmp)
		: [i] "r" (i), [v] "r" (v)
		: "memory", "cc");

	return ret;
}

#else /* HAVE_SYNC_ADD_AND_FETCH == 1 */
# error Your compiler does not provide __atomic_add_fetch or __sync_add_and_fetch \
        and an LTP implementation is missing for your architecture.
#endif

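/* The architectures that define LTP_USE_GENERIC_LOAD_STORE_ASM above (x86,
 * s390 and sparc64) have relatively strong hardware load/store ordering, so
 * plain loads and stores with a compiler barrier are presumably sufficient
 * for (1) and (3) here.
 */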
#ifdef LTP_USE_GENERIC_LOAD_STORE_ASM
static inline int tst_atomic_load(int *v)
{
	int ret;

	asm volatile("" : : : "memory");
	ret = *v;
	asm volatile("" : : : "memory");

	return ret;
}

static inline void tst_atomic_store(int i, int *v)
{
	asm volatile("" : : : "memory");
	*v = i;
	asm volatile("" : : : "memory");
}
#endif

static inline int tst_atomic_inc(int *v)
{
	return tst_atomic_add_return(1, v);
}

static inline int tst_atomic_dec(int *v)
{
	return tst_atomic_add_return(-1, v);
}

#endif	/* TST_ATOMIC_H__ */