/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_LOCAL_H
#define _ALPHA_LOCAL_H

#include <linux/percpu.h>
#include <linux/atomic.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }
#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l,i)	atomic_long_set(&(l)->a, (i))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)
#define local_add(i,l)	atomic_long_add((i),(&(l)->a))
#define local_sub(i,l)	atomic_long_sub((i),(&(l)->a))

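/*
 * Usage sketch (illustrative only; "my_counter" is a made-up name):
 * local_t wraps atomic_long_t so that per-CPU counters can be updated
 * cheaply by the CPU that owns them. DEFINE_PER_CPU() and this_cpu_ptr()
 * come in via <linux/percpu.h> above.
 *
 *	static DEFINE_PER_CPU(local_t, my_counter) = LOCAL_INIT(0);
 *
 *	local_inc(this_cpu_ptr(&my_counter));
 *	long snapshot = local_read(this_cpu_ptr(&my_counter));
 */
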
static __inline__ long local_add_return(long i, local_t *l)
{
	long temp, result;
	/*
	 * ldq_l/stq_c retry loop: if the conditional store fails, branch
	 * to the out-of-line "2:" stub and retry from "1:".
	 */
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"
	"	addq %0,%3,%2\n"
	"	addq %0,%3,%0\n"
	"	stq_c %0,%1\n"
	"	beq %0,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (l->a.counter), "=&r" (result)
	:"Ir" (i), "m" (l->a.counter) : "memory");
	return result;
}

static __inline__ long local_sub_return(long i, local_t *l)
{
	long temp, result;
	/* Same ldq_l/stq_c retry loop as local_add_return(), subtracting. */
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"
	"	subq %0,%3,%2\n"
	"	subq %0,%3,%0\n"
	"	stq_c %0,%1\n"
	"	beq %0,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (l->a.counter), "=&r" (result)
	:"Ir" (i), "m" (l->a.counter) : "memory");
	return result;
}

static __inline__ long local_cmpxchg(local_t *l, long old, long new)
{
	return cmpxchg_local(&l->a.counter, old, new);
}

static __inline__ bool local_try_cmpxchg(local_t *l, long *old, long new)
{
	return try_cmpxchg_local(&l->a.counter, (s64 *)old, new);
}

#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))

/**
 * local_add_unless - add unless the number is already a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as @l was not already @u.
 * Returns true if the addition was done.
 */
static __inline__ bool
local_add_unless(local_t *l, long a, long u)
{
	long c = local_read(l);

	do {
		if (unlikely(c == u))
			return false;
	} while (!local_try_cmpxchg(l, &c, c + a));

	return true;
}
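
/*
 * Example (a minimal sketch; "budget" is an illustrative per-CPU counter,
 * not part of this API): consume one unit of a per-CPU budget unless it
 * has already dropped to zero.
 *
 *	if (!local_add_unless(this_cpu_ptr(&budget), -1, 0))
 *		return -EBUSY;
 *
 * local_inc_not_zero() below wraps the same helper for the common
 * "take a reference only while the count is still non-zero" pattern.
 */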

#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

#define local_add_negative(a, l) (local_add_return((a), (l)) < 0)

#define local_dec_return(l) local_sub_return(1,(l))

#define local_inc_return(l) local_add_return(1,(l))

#define local_sub_and_test(i,l) (local_sub_return((i), (l)) == 0)

#define local_inc_and_test(l) (local_add_return(1, (l)) == 0)

#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/*
 * Non-atomic variants: only safe where nothing else (another CPU or an
 * interrupt) can touch the counter concurrently.
 * Verify if faster than atomic ops.
 */
#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i,l)	((l)->a.counter+=(i))
#define __local_sub(i,l)	((l)->a.counter-=(i))

#endif /* _ALPHA_LOCAL_H */