1// SPDX-License-Identifier: GPL-2.0
2
3// Generated by scripts/atomic/gen-atomic-fallback.sh
4// DO NOT MODIFY THIS FILE DIRECTLY
5
6#ifndef _LINUX_ATOMIC_FALLBACK_H
7#define _LINUX_ATOMIC_FALLBACK_H
8
/*
 * Ordering-variant fallbacks for the xchg()/cmpxchg()/cmpxchg64() macros.
 *
 * If the architecture defines no _relaxed form, the base primitive is
 * assumed fully ordered and is strong enough to stand in for all
 * orderings.  Otherwise, acquire/release/fully-ordered variants are
 * synthesised from the _relaxed form via the __atomic_op_*() wrappers
 * (which bracket the op with the appropriate fences).
 */
#ifndef xchg_relaxed
#define xchg_relaxed xchg
#define xchg_acquire xchg
#define xchg_release xchg
#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...) \
	__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...) \
	__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...) \
	__atomic_op_fence(xchg, __VA_ARGS__)
#endif

#endif /* xchg_relaxed */

#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg
#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif

#endif /* cmpxchg_relaxed */

#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed cmpxchg64
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64
#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif

#endif /* cmpxchg64_relaxed */
77
#ifndef atomic_read_acquire
/*
 * atomic_read_acquire() - read v->counter with ACQUIRE ordering:
 * memory accesses after this read cannot be reordered before it.
 * Built directly on smp_load_acquire().
 */
static inline int
atomic_read_acquire(const atomic_t *v)
{
	return smp_load_acquire(&(v)->counter);
}
#define atomic_read_acquire atomic_read_acquire
#endif

#ifndef atomic_set_release
/*
 * atomic_set_release() - write @i to v->counter with RELEASE ordering:
 * memory accesses before this store cannot be reordered after it.
 * Built directly on smp_store_release().
 */
static inline void
atomic_set_release(atomic_t *v, int i)
{
	smp_store_release(&(v)->counter, i);
}
#define atomic_set_release atomic_set_release
#endif
95
/*
 * atomic_add_return() ordering variants.
 *
 * No _relaxed form: the fully-ordered primitive serves every ordering.
 * Otherwise the acquire / release / fully-ordered forms are built from
 * the _relaxed form plus explicit fences.  Fence placement is the
 * contract: acquire fences go AFTER the op, release fences BEFORE it,
 * and the fully-ordered form brackets the op on both sides.
 */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return
#define atomic_add_return_relaxed atomic_add_return
#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
static inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();	/* order later accesses after the op */
	return ret;
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#ifndef atomic_add_return_release
static inline int
atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();	/* order earlier accesses before the op */
	return atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#ifndef atomic_add_return
static inline int
atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_add_return atomic_add_return
#endif

#endif /* atomic_add_return_relaxed */
137
/*
 * atomic_fetch_add() ordering variants (returns the OLD value).
 * Same fence scheme as atomic_add_return(): acquire fence after the
 * relaxed op, release fence before it, full fences around it.
 */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#define atomic_fetch_add_relaxed atomic_fetch_add
#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
static inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#ifndef atomic_fetch_add_release
static inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#ifndef atomic_fetch_add
static inline int
atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_add atomic_fetch_add
#endif

#endif /* atomic_fetch_add_relaxed */
179
/*
 * atomic_sub_return() ordering variants (returns the NEW value).
 * Standard fence scheme; see atomic_add_return() above.
 */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return
#define atomic_sub_return_relaxed atomic_sub_return
#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
static inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#ifndef atomic_sub_return_release
static inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#ifndef atomic_sub_return
static inline int
atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_sub_return atomic_sub_return
#endif

#endif /* atomic_sub_return_relaxed */
221
/*
 * atomic_fetch_sub() ordering variants (returns the OLD value).
 * Standard fence scheme; see atomic_add_return() above.
 */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
static inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#ifndef atomic_fetch_sub_release
static inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#ifndef atomic_fetch_sub
static inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#endif /* atomic_fetch_sub_relaxed */
263
#ifndef atomic_inc
/* atomic_inc() - increment v->counter by 1; no return value, no implied ordering. */
static inline void
atomic_inc(atomic_t *v)
{
	atomic_add(1, v);
}
#define atomic_inc atomic_inc
#endif
272
/*
 * atomic_inc_return() ordering variants.
 *
 * Two-level fallback: if no _relaxed form exists, any variant the arch
 * does provide covers all orderings, and anything still missing is
 * derived from the corresponding atomic_add_return*() with i == 1.
 * If a _relaxed form exists, the remaining orderings are built from it
 * with explicit fences, as for atomic_add_return().
 */
#ifndef atomic_inc_return_relaxed
#ifdef atomic_inc_return
#define atomic_inc_return_acquire atomic_inc_return
#define atomic_inc_return_release atomic_inc_return
#define atomic_inc_return_relaxed atomic_inc_return
#endif /* atomic_inc_return */

#ifndef atomic_inc_return
static inline int
atomic_inc_return(atomic_t *v)
{
	return atomic_add_return(1, v);
}
#define atomic_inc_return atomic_inc_return
#endif

#ifndef atomic_inc_return_acquire
static inline int
atomic_inc_return_acquire(atomic_t *v)
{
	return atomic_add_return_acquire(1, v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#ifndef atomic_inc_return_release
static inline int
atomic_inc_return_release(atomic_t *v)
{
	return atomic_add_return_release(1, v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#ifndef atomic_inc_return_relaxed
static inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	return atomic_add_return_relaxed(1, v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
static inline int
atomic_inc_return_acquire(atomic_t *v)
{
	int ret = atomic_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#ifndef atomic_inc_return_release
static inline int
atomic_inc_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#ifndef atomic_inc_return
static inline int
atomic_inc_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_inc_return atomic_inc_return
#endif

#endif /* atomic_inc_return_relaxed */
353
/*
 * atomic_fetch_inc() ordering variants (returns the OLD value).
 * Two-level fallback; derived from atomic_fetch_add*(1, v) when no
 * native form exists, otherwise from the _relaxed form plus fences.
 */
#ifndef atomic_fetch_inc_relaxed
#ifdef atomic_fetch_inc
#define atomic_fetch_inc_acquire atomic_fetch_inc
#define atomic_fetch_inc_release atomic_fetch_inc
#define atomic_fetch_inc_relaxed atomic_fetch_inc
#endif /* atomic_fetch_inc */

#ifndef atomic_fetch_inc
static inline int
atomic_fetch_inc(atomic_t *v)
{
	return atomic_fetch_add(1, v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#ifndef atomic_fetch_inc_acquire
static inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	return atomic_fetch_add_acquire(1, v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#ifndef atomic_fetch_inc_release
static inline int
atomic_fetch_inc_release(atomic_t *v)
{
	return atomic_fetch_add_release(1, v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#ifndef atomic_fetch_inc_relaxed
static inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	return atomic_fetch_add_relaxed(1, v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
static inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	int ret = atomic_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#ifndef atomic_fetch_inc_release
static inline int
atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#ifndef atomic_fetch_inc
static inline int
atomic_fetch_inc(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#endif /* atomic_fetch_inc_relaxed */
434
#ifndef atomic_dec
/* atomic_dec() - decrement v->counter by 1; no return value, no implied ordering. */
static inline void
atomic_dec(atomic_t *v)
{
	atomic_sub(1, v);
}
#define atomic_dec atomic_dec
#endif
443
/*
 * atomic_dec_return() ordering variants (returns the NEW value).
 * Two-level fallback; derived from atomic_sub_return*(1, v) when no
 * native form exists, otherwise from the _relaxed form plus fences.
 */
#ifndef atomic_dec_return_relaxed
#ifdef atomic_dec_return
#define atomic_dec_return_acquire atomic_dec_return
#define atomic_dec_return_release atomic_dec_return
#define atomic_dec_return_relaxed atomic_dec_return
#endif /* atomic_dec_return */

#ifndef atomic_dec_return
static inline int
atomic_dec_return(atomic_t *v)
{
	return atomic_sub_return(1, v);
}
#define atomic_dec_return atomic_dec_return
#endif

#ifndef atomic_dec_return_acquire
static inline int
atomic_dec_return_acquire(atomic_t *v)
{
	return atomic_sub_return_acquire(1, v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#ifndef atomic_dec_return_release
static inline int
atomic_dec_return_release(atomic_t *v)
{
	return atomic_sub_return_release(1, v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#ifndef atomic_dec_return_relaxed
static inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	return atomic_sub_return_relaxed(1, v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
static inline int
atomic_dec_return_acquire(atomic_t *v)
{
	int ret = atomic_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#ifndef atomic_dec_return_release
static inline int
atomic_dec_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#ifndef atomic_dec_return
static inline int
atomic_dec_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_dec_return atomic_dec_return
#endif

#endif /* atomic_dec_return_relaxed */
524
/*
 * atomic_fetch_dec() ordering variants (returns the OLD value).
 * Two-level fallback; derived from atomic_fetch_sub*(1, v) when no
 * native form exists, otherwise from the _relaxed form plus fences.
 */
#ifndef atomic_fetch_dec_relaxed
#ifdef atomic_fetch_dec
#define atomic_fetch_dec_acquire atomic_fetch_dec
#define atomic_fetch_dec_release atomic_fetch_dec
#define atomic_fetch_dec_relaxed atomic_fetch_dec
#endif /* atomic_fetch_dec */

#ifndef atomic_fetch_dec
static inline int
atomic_fetch_dec(atomic_t *v)
{
	return atomic_fetch_sub(1, v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#ifndef atomic_fetch_dec_acquire
static inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	return atomic_fetch_sub_acquire(1, v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#ifndef atomic_fetch_dec_release
static inline int
atomic_fetch_dec_release(atomic_t *v)
{
	return atomic_fetch_sub_release(1, v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#ifndef atomic_fetch_dec_relaxed
static inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	return atomic_fetch_sub_relaxed(1, v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
static inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	int ret = atomic_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#ifndef atomic_fetch_dec_release
static inline int
atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#ifndef atomic_fetch_dec
static inline int
atomic_fetch_dec(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#endif /* atomic_fetch_dec_relaxed */
605
/*
 * atomic_fetch_and() ordering variants (bitwise AND, returns OLD value).
 * Standard fence scheme; see atomic_add_return() above.
 */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#define atomic_fetch_and_relaxed atomic_fetch_and
#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
static inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#ifndef atomic_fetch_and_release
static inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#ifndef atomic_fetch_and
static inline int
atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_and atomic_fetch_and
#endif

#endif /* atomic_fetch_and_relaxed */
647
#ifndef atomic_andnot
/* atomic_andnot() - clear the bits of @i in v->counter (v &= ~i); no return. */
static inline void
atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}
#define atomic_andnot atomic_andnot
#endif
656
/*
 * atomic_fetch_andnot() ordering variants (v &= ~i, returns OLD value).
 * Two-level fallback; derived from atomic_fetch_and*(~i, v) when no
 * native form exists, otherwise from the _relaxed form plus fences.
 */
#ifndef atomic_fetch_andnot_relaxed
#ifdef atomic_fetch_andnot
#define atomic_fetch_andnot_acquire atomic_fetch_andnot
#define atomic_fetch_andnot_release atomic_fetch_andnot
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
#endif /* atomic_fetch_andnot */

#ifndef atomic_fetch_andnot
static inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#ifndef atomic_fetch_andnot_acquire
static inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#ifndef atomic_fetch_andnot_release
static inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#ifndef atomic_fetch_andnot_relaxed
static inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
static inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#ifndef atomic_fetch_andnot_release
static inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#ifndef atomic_fetch_andnot
static inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#endif /* atomic_fetch_andnot_relaxed */
737
/*
 * atomic_fetch_or() ordering variants (bitwise OR, returns OLD value).
 * Standard fence scheme; see atomic_add_return() above.
 */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#define atomic_fetch_or_relaxed atomic_fetch_or
#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
static inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#ifndef atomic_fetch_or_release
static inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#ifndef atomic_fetch_or
static inline int
atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_or atomic_fetch_or
#endif

#endif /* atomic_fetch_or_relaxed */
779
/*
 * atomic_fetch_xor() ordering variants (bitwise XOR, returns OLD value).
 * Standard fence scheme; see atomic_add_return() above.
 */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
static inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#ifndef atomic_fetch_xor_release
static inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#ifndef atomic_fetch_xor
static inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#endif /* atomic_fetch_xor_relaxed */
821
/*
 * atomic_xchg() ordering variants (swap in @i, return OLD value).
 * Standard fence scheme; see atomic_add_return() above.
 */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg
#define atomic_xchg_relaxed atomic_xchg
#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
static inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#ifndef atomic_xchg_release
static inline int
atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#ifndef atomic_xchg
static inline int
atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_xchg atomic_xchg
#endif

#endif /* atomic_xchg_relaxed */
863
/*
 * atomic_cmpxchg() ordering variants: if v->counter == @old, set it to
 * @new; always returns the value observed (== @old on success).
 * Standard fence scheme; see atomic_add_return() above.
 */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
static inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#ifndef atomic_cmpxchg_release
static inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#ifndef atomic_cmpxchg
static inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#endif /* atomic_cmpxchg_relaxed */
905
/*
 * atomic_try_cmpxchg() ordering variants.
 *
 * try_cmpxchg(v, &old, new) attempts cmpxchg and returns true on
 * success; on failure it updates *old with the value actually observed
 * so the caller can retry without re-reading.  Built on the matching
 * atomic_cmpxchg*() when no native form exists, otherwise the other
 * orderings are derived from the _relaxed form with explicit fences.
 */
#ifndef atomic_try_cmpxchg_relaxed
#ifdef atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
#define atomic_try_cmpxchg_release atomic_try_cmpxchg
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */

#ifndef atomic_try_cmpxchg
static inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;	/* failed: feed the observed value back to the caller */
	return likely(r == o);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#ifndef atomic_try_cmpxchg_acquire
static inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#ifndef atomic_try_cmpxchg_release
static inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#ifndef atomic_try_cmpxchg_relaxed
static inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif

#else /* atomic_try_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg_acquire
static inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#ifndef atomic_try_cmpxchg_release
static inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_release_fence();
	return atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#ifndef atomic_try_cmpxchg
static inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#endif /* atomic_try_cmpxchg_relaxed */
1002
#ifndef atomic_sub_and_test
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	/* Inherits full ordering from atomic_sub_return(). */
	return atomic_sub_return(i, v) == 0;
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#ifndef atomic_dec_and_test
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool
atomic_dec_and_test(atomic_t *v)
{
	/* Inherits full ordering from atomic_dec_return(). */
	return atomic_dec_return(v) == 0;
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#ifndef atomic_inc_and_test
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool
atomic_inc_and_test(atomic_t *v)
{
	/* Inherits full ordering from atomic_inc_return(). */
	return atomic_inc_return(v) == 0;
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#ifndef atomic_add_negative
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool
atomic_add_negative(int i, atomic_t *v)
{
	/* Inherits full ordering from atomic_add_return(). */
	return atomic_add_return(i, v) < 0;
}
#define atomic_add_negative atomic_add_negative
#endif
1072
#ifndef atomic_fetch_add_unless
/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	/*
	 * Standard try_cmpxchg retry loop: on a failed cmpxchg, @c is
	 * refreshed with the observed value and the @u check is redone,
	 * so we never add once @v has become @u.
	 */
	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif
1097
#ifndef atomic_add_unless
/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	/* fetch_add_unless() returns the old value; it equals @u iff we did not add. */
	return atomic_fetch_add_unless(v, a, u) != u;
}
#define atomic_add_unless atomic_add_unless
#endif

#ifndef atomic_inc_not_zero
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static inline bool
atomic_inc_not_zero(atomic_t *v)
{
	return atomic_add_unless(v, 1, 0);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif
1131
#ifndef atomic_inc_unless_negative
/*
 * atomic_inc_unless_negative() - increment @v by 1 unless its value is
 * negative.  Returns true if the increment was done.
 */
static inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	int c = atomic_read(v);

	/* try_cmpxchg refreshes @c on failure, so the sign check is redone. */
	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#ifndef atomic_dec_unless_positive
/*
 * atomic_dec_unless_positive() - decrement @v by 1 unless its value is
 * positive.  Returns true if the decrement was done.
 */
static inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif
1163
#ifndef atomic_dec_if_positive
/*
 * atomic_dec_if_positive() - decrement @v by 1 only if the result would
 * be non-negative.  Returns the new value on success, or the (negative)
 * value the decrement would have produced if @v was not decremented.
 */
static inline int
atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;	/* would go negative: bail, returning dec < 0 */
	} while (!atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif
1180
/*
 * Spin (via smp_cond_load_*()) until the condition expression @c,
 * evaluated with VAL bound to v->counter, becomes true.  The _acquire
 * form additionally provides ACQUIRE ordering on the final load.
 */
#define atomic_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
#define atomic_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
1183
1184#ifdef CONFIG_GENERIC_ATOMIC64
1185#include <asm-generic/atomic64.h>
1186#endif
1187
#ifndef atomic64_read_acquire
/* 64-bit counterpart of atomic_read_acquire(); see above. */
static inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	return smp_load_acquire(&(v)->counter);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif

#ifndef atomic64_set_release
/* 64-bit counterpart of atomic_set_release(); see above. */
static inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	smp_store_release(&(v)->counter, i);
}
#define atomic64_set_release atomic64_set_release
#endif
1205
/*
 * atomic64_add_return() ordering variants; 64-bit counterpart of
 * atomic_add_return() with the same fence scheme.
 */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#define atomic64_add_return_relaxed atomic64_add_return
#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
static inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#ifndef atomic64_add_return_release
static inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#ifndef atomic64_add_return
static inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_add_return atomic64_add_return
#endif

#endif /* atomic64_add_return_relaxed */
1247
#ifndef atomic64_fetch_add_relaxed
/*
 * Only a fully-ordered atomic64_fetch_add() exists; reuse it for the
 * weaker orderings (stronger ordering is always acceptable).
 */
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#ifndef atomic64_fetch_add_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#ifndef atomic64_fetch_add
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#endif /* atomic64_fetch_add_relaxed */
1289
#ifndef atomic64_sub_return_relaxed
/*
 * Only a fully-ordered atomic64_sub_return() exists; reuse it for the
 * weaker orderings (stronger ordering is always acceptable).
 */
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#define atomic64_sub_return_relaxed atomic64_sub_return
#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#ifndef atomic64_sub_return_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#ifndef atomic64_sub_return
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_sub_return atomic64_sub_return
#endif

#endif /* atomic64_sub_return_relaxed */
1331
#ifndef atomic64_fetch_sub_relaxed
/*
 * Only a fully-ordered atomic64_fetch_sub() exists; reuse it for the
 * weaker orderings (stronger ordering is always acceptable).
 */
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#ifndef atomic64_fetch_sub_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#ifndef atomic64_fetch_sub
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#endif /* atomic64_fetch_sub_relaxed */
1373
#ifndef atomic64_inc
/* Fallback: increment is an add of 1. */
static inline void
atomic64_inc(atomic64_t *v)
{
	atomic64_add(1, v);
}
#define atomic64_inc atomic64_inc
#endif
1382
#ifndef atomic64_inc_return_relaxed
#ifdef atomic64_inc_return
/* Only a fully-ordered atomic64_inc_return() exists: use it for all orderings. */
#define atomic64_inc_return_acquire atomic64_inc_return
#define atomic64_inc_return_release atomic64_inc_return
#define atomic64_inc_return_relaxed atomic64_inc_return
#endif /* atomic64_inc_return */

/*
 * No arch inc_return at all: build each ordering from the matching
 * add_return variant with an operand of 1.
 */
#ifndef atomic64_inc_return
static inline s64
atomic64_inc_return(atomic64_t *v)
{
	return atomic64_add_return(1, v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#ifndef atomic64_inc_return_acquire
static inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	return atomic64_add_return_acquire(1, v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#ifndef atomic64_inc_return_release
static inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	return atomic64_add_return_release(1, v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#ifndef atomic64_inc_return_relaxed
static inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	return atomic64_add_return_relaxed(1, v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	s64 ret = atomic64_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#ifndef atomic64_inc_return_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#ifndef atomic64_inc_return
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline s64
atomic64_inc_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_inc_return atomic64_inc_return
#endif

#endif /* atomic64_inc_return_relaxed */
1463
#ifndef atomic64_fetch_inc_relaxed
#ifdef atomic64_fetch_inc
/* Only a fully-ordered atomic64_fetch_inc() exists: use it for all orderings. */
#define atomic64_fetch_inc_acquire atomic64_fetch_inc
#define atomic64_fetch_inc_release atomic64_fetch_inc
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

/*
 * No arch fetch_inc at all: build each ordering from the matching
 * fetch_add variant with an operand of 1.
 */
#ifndef atomic64_fetch_inc
static inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	return atomic64_fetch_add(1, v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#ifndef atomic64_fetch_inc_acquire
static inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	return atomic64_fetch_add_acquire(1, v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#ifndef atomic64_fetch_inc_release
static inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	return atomic64_fetch_add_release(1, v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#ifndef atomic64_fetch_inc_relaxed
static inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	return atomic64_fetch_add_relaxed(1, v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	s64 ret = atomic64_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#ifndef atomic64_fetch_inc_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#ifndef atomic64_fetch_inc
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#endif /* atomic64_fetch_inc_relaxed */
1544
#ifndef atomic64_dec
/* Fallback: decrement is a subtract of 1. */
static inline void
atomic64_dec(atomic64_t *v)
{
	atomic64_sub(1, v);
}
#define atomic64_dec atomic64_dec
#endif
1553
#ifndef atomic64_dec_return_relaxed
#ifdef atomic64_dec_return
/* Only a fully-ordered atomic64_dec_return() exists: use it for all orderings. */
#define atomic64_dec_return_acquire atomic64_dec_return
#define atomic64_dec_return_release atomic64_dec_return
#define atomic64_dec_return_relaxed atomic64_dec_return
#endif /* atomic64_dec_return */

/*
 * No arch dec_return at all: build each ordering from the matching
 * sub_return variant with an operand of 1.
 */
#ifndef atomic64_dec_return
static inline s64
atomic64_dec_return(atomic64_t *v)
{
	return atomic64_sub_return(1, v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#ifndef atomic64_dec_return_acquire
static inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	return atomic64_sub_return_acquire(1, v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#ifndef atomic64_dec_return_release
static inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	return atomic64_sub_return_release(1, v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#ifndef atomic64_dec_return_relaxed
static inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	return atomic64_sub_return_relaxed(1, v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	s64 ret = atomic64_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#ifndef atomic64_dec_return_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#ifndef atomic64_dec_return
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline s64
atomic64_dec_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_dec_return atomic64_dec_return
#endif

#endif /* atomic64_dec_return_relaxed */
1634
#ifndef atomic64_fetch_dec_relaxed
#ifdef atomic64_fetch_dec
/* Only a fully-ordered atomic64_fetch_dec() exists: use it for all orderings. */
#define atomic64_fetch_dec_acquire atomic64_fetch_dec
#define atomic64_fetch_dec_release atomic64_fetch_dec
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

/*
 * No arch fetch_dec at all: build each ordering from the matching
 * fetch_sub variant with an operand of 1.
 */
#ifndef atomic64_fetch_dec
static inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	return atomic64_fetch_sub(1, v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#ifndef atomic64_fetch_dec_acquire
static inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	return atomic64_fetch_sub_acquire(1, v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#ifndef atomic64_fetch_dec_release
static inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	return atomic64_fetch_sub_release(1, v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#ifndef atomic64_fetch_dec_relaxed
static inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	return atomic64_fetch_sub_relaxed(1, v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	s64 ret = atomic64_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#ifndef atomic64_fetch_dec_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#ifndef atomic64_fetch_dec
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#endif /* atomic64_fetch_dec_relaxed */
1715
#ifndef atomic64_fetch_and_relaxed
/*
 * Only a fully-ordered atomic64_fetch_and() exists; reuse it for the
 * weaker orderings (stronger ordering is always acceptable).
 */
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#ifndef atomic64_fetch_and_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#ifndef atomic64_fetch_and
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#endif /* atomic64_fetch_and_relaxed */
1757
#ifndef atomic64_andnot
/* Fallback: and-not is an AND with the complemented operand. */
static inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	atomic64_and(~i, v);
}
#define atomic64_andnot atomic64_andnot
#endif
1766
#ifndef atomic64_fetch_andnot_relaxed
#ifdef atomic64_fetch_andnot
/* Only a fully-ordered atomic64_fetch_andnot() exists: use it for all orderings. */
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
#define atomic64_fetch_andnot_release atomic64_fetch_andnot
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
#endif /* atomic64_fetch_andnot */

/*
 * No arch fetch_andnot at all: build each ordering from the matching
 * fetch_and variant with the complemented operand.
 */
#ifndef atomic64_fetch_andnot
static inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#ifndef atomic64_fetch_andnot_acquire
static inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
static inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot_relaxed
static inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#endif /* atomic64_fetch_andnot_relaxed */
1847
#ifndef atomic64_fetch_or_relaxed
/*
 * Only a fully-ordered atomic64_fetch_or() exists; reuse it for the
 * weaker orderings (stronger ordering is always acceptable).
 */
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#ifndef atomic64_fetch_or_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#ifndef atomic64_fetch_or
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#endif /* atomic64_fetch_or_relaxed */
1889
#ifndef atomic64_fetch_xor_relaxed
/*
 * Only a fully-ordered atomic64_fetch_xor() exists; reuse it for the
 * weaker orderings (stronger ordering is always acceptable).
 */
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#ifndef atomic64_fetch_xor_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#ifndef atomic64_fetch_xor
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#endif /* atomic64_fetch_xor_relaxed */
1931
#ifndef atomic64_xchg_relaxed
/*
 * Only a fully-ordered atomic64_xchg() exists; reuse it for the
 * weaker orderings (stronger ordering is always acceptable).
 */
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg
#define atomic64_xchg_relaxed atomic64_xchg
#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#ifndef atomic64_xchg_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#ifndef atomic64_xchg
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_xchg atomic64_xchg
#endif

#endif /* atomic64_xchg_relaxed */
1973
#ifndef atomic64_cmpxchg_relaxed
/*
 * Only a fully-ordered atomic64_cmpxchg() exists; reuse it for the
 * weaker orderings (stronger ordering is always acceptable).
 */
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#ifndef atomic64_cmpxchg_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#ifndef atomic64_cmpxchg
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#endif /* atomic64_cmpxchg_relaxed */
2015
#ifndef atomic64_try_cmpxchg_relaxed
#ifdef atomic64_try_cmpxchg
/* Only a fully-ordered atomic64_try_cmpxchg() exists: use it for all orderings. */
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

/*
 * No arch try_cmpxchg: emulate it with the matching cmpxchg variant.
 * On failure the value actually observed is written back through *old,
 * so callers can loop and retry with the fresh value.
 */
#ifndef atomic64_try_cmpxchg
static inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#ifndef atomic64_try_cmpxchg_acquire
static inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
static inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg_relaxed
static inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif

#else /* atomic64_try_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg_acquire
/* Acquire variant: the relaxed op followed by an acquire fence. */
static inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
/* Release variant: a release fence ahead of the relaxed op. */
static inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg
/* Fully-ordered variant: the relaxed op bracketed by full fences. */
static inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#endif /* atomic64_try_cmpxchg_relaxed */
2112
#ifndef atomic64_sub_and_test
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	/* Fallback: compare the fully-ordered sub_return result against zero. */
	return atomic64_sub_return(i, v) == 0;
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif
2130
#ifndef atomic64_dec_and_test
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	/* Fallback: compare the fully-ordered dec_return result against zero. */
	return atomic64_dec_return(v) == 0;
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif
2147
#ifndef atomic64_inc_and_test
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	/* Fallback: compare the fully-ordered inc_return result against zero. */
	return atomic64_inc_return(v) == 0;
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif
2164
#ifndef atomic64_add_negative
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	/* Fallback: test the sign of the fully-ordered add_return result. */
	return atomic64_add_return(i, v) < 0;
}
#define atomic64_add_negative atomic64_add_negative
#endif
2182
#ifndef atomic64_fetch_add_unless
/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 c = atomic64_read(v);

	/*
	 * try_cmpxchg() updates @c with the value observed on failure, so
	 * each retry re-tests the freshly-read value against @u.
	 */
	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif
2207
#ifndef atomic64_add_unless
/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	/* fetch_add_unless() returns the old value; it equals @u iff no add happened. */
	return atomic64_fetch_add_unless(v, a, u) != u;
}
#define atomic64_add_unless atomic64_add_unless
#endif
2225
#ifndef atomic64_inc_not_zero
/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	/* Fallback: "add 1 unless the value is 0". */
	return atomic64_add_unless(v, 1, 0);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif
2241
#ifndef atomic64_inc_unless_negative
/*
 * Atomically increment @v by 1 unless its value is negative.
 * Returns true if the increment was performed.
 */
static inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	s64 c = atomic64_read(v);

	/* try_cmpxchg() refreshes @c on failure, so the sign is re-checked each retry. */
	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif
2257
#ifndef atomic64_dec_unless_positive
/*
 * Atomically decrement @v by 1 unless its value is positive.
 * Returns true if the decrement was performed.
 */
static inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	s64 c = atomic64_read(v);

	/* try_cmpxchg() refreshes @c on failure, so the sign is re-checked each retry. */
	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif
2273
#ifndef atomic64_dec_if_positive
/*
 * Atomically decrement @v by 1 only if the result would be non-negative.
 * Returns the decremented value; a negative return means no decrement
 * took place.
 */
static inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = atomic64_read(v);

	/* try_cmpxchg() refreshes @c on failure, so @dec is recomputed each retry. */
	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
2290
/* Map atomic64_cond_read_*() onto smp_cond_load_*() over the counter field. */
#define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
#define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
2293
2294#endif /* _LINUX_ATOMIC_FALLBACK_H */
2295// 25de4a2804d70f57e994fe3b419148658bb5378a
2296