/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * arch_atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long arch_atomic64_read(const atomic64_t *v)
{
	return READ_ONCE((v)->counter);
}

/**
 * arch_atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void arch_atomic64_set(atomic64_t *v, long i)
{
	WRITE_ONCE(v->counter, i);
}
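
/*
 * Illustrative usage (the counter and helpers below are hypothetical,
 * shown only for the read/set calling convention):
 *
 *	static atomic64_t event_count = ATOMIC64_INIT(0);
 *
 *	void reset_events(void)
 *	{
 *		arch_atomic64_set(&event_count, 0);
 *	}
 *
 *	long snapshot_events(void)
 *	{
 *		return arch_atomic64_read(&event_count);
 *	}
 *
 * Note that most kernel code should use the generic atomic64_*()
 * wrappers rather than these arch_*() variants directly.
 */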

/**
 * arch_atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void arch_atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * arch_atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void arch_atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
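
/*
 * The "er" constraint above allows @i to be passed either in a register
 * or as a 32-bit sign-extended immediate, which is exactly the operand
 * set that addq/subq accept.
 */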

/**
 * arch_atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_sub_and_test(long i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
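
/*
 * Illustrative use of the return value (the object and helper names are
 * hypothetical):
 *
 *	static void obj_sub(struct obj *o, long n)
 *	{
 *		if (arch_atomic64_sub_and_test(n, &o->refs))
 *			obj_free(o);	// count dropped to zero
 *	}
 */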

/**
 * arch_atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void arch_atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}
#define arch_atomic64_inc arch_atomic64_inc

/**
 * arch_atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}
#define arch_atomic64_dec arch_atomic64_dec

/**
 * arch_atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool arch_atomic64_dec_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, e);
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
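
/*
 * GEN_UNARY_RMWcc()/GEN_BINARY_RMWcc() (from <asm/rmwcc.h>) emit the
 * locked instruction and return the named condition flag ("e" for ZF,
 * "s" for SF) as a bool, avoiding a separate compare instruction.
 */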

/**
 * arch_atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_inc_and_test(atomic64_t *v)
{
	return GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, e);
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test

/**
 * arch_atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static inline bool arch_atomic64_add_negative(long i, atomic64_t *v)
{
	return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i);
}
#define arch_atomic64_add_negative arch_atomic64_add_negative

/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v.
 */
static __always_inline long arch_atomic64_add_return(long i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}
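
/*
 * xadd() exchanges-and-adds, returning the value *before* the addition;
 * adding @i back yields the new value. Illustrative contrast (variable
 * names hypothetical):
 *
 *	long new_total = arch_atomic64_add_return(delta, &total);
 *	long old_total = arch_atomic64_fetch_add(delta, &total);
 */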

static inline long arch_atomic64_sub_return(long i, atomic64_t *v)
{
	return arch_atomic64_add_return(-i, v);
}

static inline long arch_atomic64_fetch_add(long i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}

static inline long arch_atomic64_fetch_sub(long i, atomic64_t *v)
{
	return xadd(&v->counter, -i);
}

static inline long arch_atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return arch_cmpxchg(&v->counter, old, new);
}

#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, long new)
{
	return try_cmpxchg(&v->counter, old, new);
}
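
/*
 * try_cmpxchg() returns true when the exchange succeeds; on failure it
 * updates *old with the value currently in memory. That is what allows
 * the fetch_{and,or,xor}() loops below to retry with an empty body.
 */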

static inline long arch_atomic64_xchg(atomic64_t *v, long new)
{
	return arch_xchg(&v->counter, new);
}
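
/*
 * No LOCK_PREFIX here: an x86 xchg with a memory operand is implicitly
 * locked, so arch_xchg() needs no explicit prefix.
 */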

static inline void arch_atomic64_and(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "andq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

static inline long arch_atomic64_fetch_and(long i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val & i));
	return val;
}

static inline void arch_atomic64_or(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "orq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

static inline long arch_atomic64_fetch_or(long i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val | i));
	return val;
}

static inline void arch_atomic64_xor(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "xorq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

static inline long arch_atomic64_fetch_xor(long i, atomic64_t *v)
{
	s64 val = arch_atomic64_read(v);

	do {
	} while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i));
	return val;
}

#endif /* _ASM_X86_ATOMIC64_64_H */