// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-long.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_LONG_H
#define _LINUX_ATOMIC_LONG_H

#include <linux/compiler.h>
#include <asm/types.h>
13 typedef atomic64_t atomic_long_t;
14 #define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
15 #define atomic_long_cond_read_acquire atomic64_cond_read_acquire
16 #define atomic_long_cond_read_relaxed atomic64_cond_read_relaxed
18 typedef atomic_t atomic_long_t;
19 #define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
20 #define atomic_long_cond_read_acquire atomic_cond_read_acquire
21 #define atomic_long_cond_read_relaxed atomic_cond_read_relaxed
25 * raw_atomic_long_read() - atomic load with relaxed ordering
26 * @v: pointer to atomic_long_t
28 * Atomically loads the value of @v with relaxed ordering.
30 * Safe to use in noinstr code; prefer atomic_long_read() elsewhere.
32 * Return: The value loaded from @v.
34 static __always_inline long
35 raw_atomic_long_read(const atomic_long_t *v)
38 return raw_atomic64_read(v);
40 return raw_atomic_read(v);
45 * raw_atomic_long_read_acquire() - atomic load with acquire ordering
46 * @v: pointer to atomic_long_t
48 * Atomically loads the value of @v with acquire ordering.
50 * Safe to use in noinstr code; prefer atomic_long_read_acquire() elsewhere.
52 * Return: The value loaded from @v.
54 static __always_inline long
55 raw_atomic_long_read_acquire(const atomic_long_t *v)
58 return raw_atomic64_read_acquire(v);
60 return raw_atomic_read_acquire(v);
65 * raw_atomic_long_set() - atomic set with relaxed ordering
66 * @v: pointer to atomic_long_t
67 * @i: long value to assign
69 * Atomically sets @v to @i with relaxed ordering.
71 * Safe to use in noinstr code; prefer atomic_long_set() elsewhere.
75 static __always_inline void
76 raw_atomic_long_set(atomic_long_t *v, long i)
79 raw_atomic64_set(v, i);
86 * raw_atomic_long_set_release() - atomic set with release ordering
87 * @v: pointer to atomic_long_t
88 * @i: long value to assign
90 * Atomically sets @v to @i with release ordering.
92 * Safe to use in noinstr code; prefer atomic_long_set_release() elsewhere.
96 static __always_inline void
97 raw_atomic_long_set_release(atomic_long_t *v, long i)
100 raw_atomic64_set_release(v, i);
102 raw_atomic_set_release(v, i);
107 * raw_atomic_long_add() - atomic add with relaxed ordering
108 * @i: long value to add
109 * @v: pointer to atomic_long_t
111 * Atomically updates @v to (@v + @i) with relaxed ordering.
113 * Safe to use in noinstr code; prefer atomic_long_add() elsewhere.
117 static __always_inline void
118 raw_atomic_long_add(long i, atomic_long_t *v)
121 raw_atomic64_add(i, v);
123 raw_atomic_add(i, v);
128 * raw_atomic_long_add_return() - atomic add with full ordering
129 * @i: long value to add
130 * @v: pointer to atomic_long_t
132 * Atomically updates @v to (@v + @i) with full ordering.
134 * Safe to use in noinstr code; prefer atomic_long_add_return() elsewhere.
136 * Return: The updated value of @v.
138 static __always_inline long
139 raw_atomic_long_add_return(long i, atomic_long_t *v)
142 return raw_atomic64_add_return(i, v);
144 return raw_atomic_add_return(i, v);
149 * raw_atomic_long_add_return_acquire() - atomic add with acquire ordering
150 * @i: long value to add
151 * @v: pointer to atomic_long_t
153 * Atomically updates @v to (@v + @i) with acquire ordering.
155 * Safe to use in noinstr code; prefer atomic_long_add_return_acquire() elsewhere.
157 * Return: The updated value of @v.
159 static __always_inline long
160 raw_atomic_long_add_return_acquire(long i, atomic_long_t *v)
163 return raw_atomic64_add_return_acquire(i, v);
165 return raw_atomic_add_return_acquire(i, v);
170 * raw_atomic_long_add_return_release() - atomic add with release ordering
171 * @i: long value to add
172 * @v: pointer to atomic_long_t
174 * Atomically updates @v to (@v + @i) with release ordering.
176 * Safe to use in noinstr code; prefer atomic_long_add_return_release() elsewhere.
178 * Return: The updated value of @v.
180 static __always_inline long
181 raw_atomic_long_add_return_release(long i, atomic_long_t *v)
184 return raw_atomic64_add_return_release(i, v);
186 return raw_atomic_add_return_release(i, v);
191 * raw_atomic_long_add_return_relaxed() - atomic add with relaxed ordering
192 * @i: long value to add
193 * @v: pointer to atomic_long_t
195 * Atomically updates @v to (@v + @i) with relaxed ordering.
197 * Safe to use in noinstr code; prefer atomic_long_add_return_relaxed() elsewhere.
199 * Return: The updated value of @v.
201 static __always_inline long
202 raw_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
205 return raw_atomic64_add_return_relaxed(i, v);
207 return raw_atomic_add_return_relaxed(i, v);
212 * raw_atomic_long_fetch_add() - atomic add with full ordering
213 * @i: long value to add
214 * @v: pointer to atomic_long_t
216 * Atomically updates @v to (@v + @i) with full ordering.
218 * Safe to use in noinstr code; prefer atomic_long_fetch_add() elsewhere.
220 * Return: The original value of @v.
222 static __always_inline long
223 raw_atomic_long_fetch_add(long i, atomic_long_t *v)
226 return raw_atomic64_fetch_add(i, v);
228 return raw_atomic_fetch_add(i, v);
233 * raw_atomic_long_fetch_add_acquire() - atomic add with acquire ordering
234 * @i: long value to add
235 * @v: pointer to atomic_long_t
237 * Atomically updates @v to (@v + @i) with acquire ordering.
239 * Safe to use in noinstr code; prefer atomic_long_fetch_add_acquire() elsewhere.
241 * Return: The original value of @v.
243 static __always_inline long
244 raw_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
247 return raw_atomic64_fetch_add_acquire(i, v);
249 return raw_atomic_fetch_add_acquire(i, v);
254 * raw_atomic_long_fetch_add_release() - atomic add with release ordering
255 * @i: long value to add
256 * @v: pointer to atomic_long_t
258 * Atomically updates @v to (@v + @i) with release ordering.
260 * Safe to use in noinstr code; prefer atomic_long_fetch_add_release() elsewhere.
262 * Return: The original value of @v.
264 static __always_inline long
265 raw_atomic_long_fetch_add_release(long i, atomic_long_t *v)
268 return raw_atomic64_fetch_add_release(i, v);
270 return raw_atomic_fetch_add_release(i, v);
275 * raw_atomic_long_fetch_add_relaxed() - atomic add with relaxed ordering
276 * @i: long value to add
277 * @v: pointer to atomic_long_t
279 * Atomically updates @v to (@v + @i) with relaxed ordering.
281 * Safe to use in noinstr code; prefer atomic_long_fetch_add_relaxed() elsewhere.
283 * Return: The original value of @v.
285 static __always_inline long
286 raw_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
289 return raw_atomic64_fetch_add_relaxed(i, v);
291 return raw_atomic_fetch_add_relaxed(i, v);
296 * raw_atomic_long_sub() - atomic subtract with relaxed ordering
297 * @i: long value to subtract
298 * @v: pointer to atomic_long_t
300 * Atomically updates @v to (@v - @i) with relaxed ordering.
302 * Safe to use in noinstr code; prefer atomic_long_sub() elsewhere.
306 static __always_inline void
307 raw_atomic_long_sub(long i, atomic_long_t *v)
310 raw_atomic64_sub(i, v);
312 raw_atomic_sub(i, v);
317 * raw_atomic_long_sub_return() - atomic subtract with full ordering
318 * @i: long value to subtract
319 * @v: pointer to atomic_long_t
321 * Atomically updates @v to (@v - @i) with full ordering.
323 * Safe to use in noinstr code; prefer atomic_long_sub_return() elsewhere.
325 * Return: The updated value of @v.
327 static __always_inline long
328 raw_atomic_long_sub_return(long i, atomic_long_t *v)
331 return raw_atomic64_sub_return(i, v);
333 return raw_atomic_sub_return(i, v);
338 * raw_atomic_long_sub_return_acquire() - atomic subtract with acquire ordering
339 * @i: long value to subtract
340 * @v: pointer to atomic_long_t
342 * Atomically updates @v to (@v - @i) with acquire ordering.
344 * Safe to use in noinstr code; prefer atomic_long_sub_return_acquire() elsewhere.
346 * Return: The updated value of @v.
348 static __always_inline long
349 raw_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
352 return raw_atomic64_sub_return_acquire(i, v);
354 return raw_atomic_sub_return_acquire(i, v);
359 * raw_atomic_long_sub_return_release() - atomic subtract with release ordering
360 * @i: long value to subtract
361 * @v: pointer to atomic_long_t
363 * Atomically updates @v to (@v - @i) with release ordering.
365 * Safe to use in noinstr code; prefer atomic_long_sub_return_release() elsewhere.
367 * Return: The updated value of @v.
369 static __always_inline long
370 raw_atomic_long_sub_return_release(long i, atomic_long_t *v)
373 return raw_atomic64_sub_return_release(i, v);
375 return raw_atomic_sub_return_release(i, v);
380 * raw_atomic_long_sub_return_relaxed() - atomic subtract with relaxed ordering
381 * @i: long value to subtract
382 * @v: pointer to atomic_long_t
384 * Atomically updates @v to (@v - @i) with relaxed ordering.
386 * Safe to use in noinstr code; prefer atomic_long_sub_return_relaxed() elsewhere.
388 * Return: The updated value of @v.
390 static __always_inline long
391 raw_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
394 return raw_atomic64_sub_return_relaxed(i, v);
396 return raw_atomic_sub_return_relaxed(i, v);
401 * raw_atomic_long_fetch_sub() - atomic subtract with full ordering
402 * @i: long value to subtract
403 * @v: pointer to atomic_long_t
405 * Atomically updates @v to (@v - @i) with full ordering.
407 * Safe to use in noinstr code; prefer atomic_long_fetch_sub() elsewhere.
409 * Return: The original value of @v.
411 static __always_inline long
412 raw_atomic_long_fetch_sub(long i, atomic_long_t *v)
415 return raw_atomic64_fetch_sub(i, v);
417 return raw_atomic_fetch_sub(i, v);
422 * raw_atomic_long_fetch_sub_acquire() - atomic subtract with acquire ordering
423 * @i: long value to subtract
424 * @v: pointer to atomic_long_t
426 * Atomically updates @v to (@v - @i) with acquire ordering.
428 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_acquire() elsewhere.
430 * Return: The original value of @v.
432 static __always_inline long
433 raw_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
436 return raw_atomic64_fetch_sub_acquire(i, v);
438 return raw_atomic_fetch_sub_acquire(i, v);
443 * raw_atomic_long_fetch_sub_release() - atomic subtract with release ordering
444 * @i: long value to subtract
445 * @v: pointer to atomic_long_t
447 * Atomically updates @v to (@v - @i) with release ordering.
449 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_release() elsewhere.
451 * Return: The original value of @v.
453 static __always_inline long
454 raw_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
457 return raw_atomic64_fetch_sub_release(i, v);
459 return raw_atomic_fetch_sub_release(i, v);
464 * raw_atomic_long_fetch_sub_relaxed() - atomic subtract with relaxed ordering
465 * @i: long value to subtract
466 * @v: pointer to atomic_long_t
468 * Atomically updates @v to (@v - @i) with relaxed ordering.
470 * Safe to use in noinstr code; prefer atomic_long_fetch_sub_relaxed() elsewhere.
472 * Return: The original value of @v.
474 static __always_inline long
475 raw_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
478 return raw_atomic64_fetch_sub_relaxed(i, v);
480 return raw_atomic_fetch_sub_relaxed(i, v);
485 * raw_atomic_long_inc() - atomic increment with relaxed ordering
486 * @v: pointer to atomic_long_t
488 * Atomically updates @v to (@v + 1) with relaxed ordering.
490 * Safe to use in noinstr code; prefer atomic_long_inc() elsewhere.
494 static __always_inline void
495 raw_atomic_long_inc(atomic_long_t *v)
505 * raw_atomic_long_inc_return() - atomic increment with full ordering
506 * @v: pointer to atomic_long_t
508 * Atomically updates @v to (@v + 1) with full ordering.
510 * Safe to use in noinstr code; prefer atomic_long_inc_return() elsewhere.
512 * Return: The updated value of @v.
514 static __always_inline long
515 raw_atomic_long_inc_return(atomic_long_t *v)
518 return raw_atomic64_inc_return(v);
520 return raw_atomic_inc_return(v);
525 * raw_atomic_long_inc_return_acquire() - atomic increment with acquire ordering
526 * @v: pointer to atomic_long_t
528 * Atomically updates @v to (@v + 1) with acquire ordering.
530 * Safe to use in noinstr code; prefer atomic_long_inc_return_acquire() elsewhere.
532 * Return: The updated value of @v.
534 static __always_inline long
535 raw_atomic_long_inc_return_acquire(atomic_long_t *v)
538 return raw_atomic64_inc_return_acquire(v);
540 return raw_atomic_inc_return_acquire(v);
545 * raw_atomic_long_inc_return_release() - atomic increment with release ordering
546 * @v: pointer to atomic_long_t
548 * Atomically updates @v to (@v + 1) with release ordering.
550 * Safe to use in noinstr code; prefer atomic_long_inc_return_release() elsewhere.
552 * Return: The updated value of @v.
554 static __always_inline long
555 raw_atomic_long_inc_return_release(atomic_long_t *v)
558 return raw_atomic64_inc_return_release(v);
560 return raw_atomic_inc_return_release(v);
565 * raw_atomic_long_inc_return_relaxed() - atomic increment with relaxed ordering
566 * @v: pointer to atomic_long_t
568 * Atomically updates @v to (@v + 1) with relaxed ordering.
570 * Safe to use in noinstr code; prefer atomic_long_inc_return_relaxed() elsewhere.
572 * Return: The updated value of @v.
574 static __always_inline long
575 raw_atomic_long_inc_return_relaxed(atomic_long_t *v)
578 return raw_atomic64_inc_return_relaxed(v);
580 return raw_atomic_inc_return_relaxed(v);
585 * raw_atomic_long_fetch_inc() - atomic increment with full ordering
586 * @v: pointer to atomic_long_t
588 * Atomically updates @v to (@v + 1) with full ordering.
590 * Safe to use in noinstr code; prefer atomic_long_fetch_inc() elsewhere.
592 * Return: The original value of @v.
594 static __always_inline long
595 raw_atomic_long_fetch_inc(atomic_long_t *v)
598 return raw_atomic64_fetch_inc(v);
600 return raw_atomic_fetch_inc(v);
605 * raw_atomic_long_fetch_inc_acquire() - atomic increment with acquire ordering
606 * @v: pointer to atomic_long_t
608 * Atomically updates @v to (@v + 1) with acquire ordering.
610 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_acquire() elsewhere.
612 * Return: The original value of @v.
614 static __always_inline long
615 raw_atomic_long_fetch_inc_acquire(atomic_long_t *v)
618 return raw_atomic64_fetch_inc_acquire(v);
620 return raw_atomic_fetch_inc_acquire(v);
625 * raw_atomic_long_fetch_inc_release() - atomic increment with release ordering
626 * @v: pointer to atomic_long_t
628 * Atomically updates @v to (@v + 1) with release ordering.
630 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_release() elsewhere.
632 * Return: The original value of @v.
634 static __always_inline long
635 raw_atomic_long_fetch_inc_release(atomic_long_t *v)
638 return raw_atomic64_fetch_inc_release(v);
640 return raw_atomic_fetch_inc_release(v);
645 * raw_atomic_long_fetch_inc_relaxed() - atomic increment with relaxed ordering
646 * @v: pointer to atomic_long_t
648 * Atomically updates @v to (@v + 1) with relaxed ordering.
650 * Safe to use in noinstr code; prefer atomic_long_fetch_inc_relaxed() elsewhere.
652 * Return: The original value of @v.
654 static __always_inline long
655 raw_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
658 return raw_atomic64_fetch_inc_relaxed(v);
660 return raw_atomic_fetch_inc_relaxed(v);
665 * raw_atomic_long_dec() - atomic decrement with relaxed ordering
666 * @v: pointer to atomic_long_t
668 * Atomically updates @v to (@v - 1) with relaxed ordering.
670 * Safe to use in noinstr code; prefer atomic_long_dec() elsewhere.
674 static __always_inline void
675 raw_atomic_long_dec(atomic_long_t *v)
685 * raw_atomic_long_dec_return() - atomic decrement with full ordering
686 * @v: pointer to atomic_long_t
688 * Atomically updates @v to (@v - 1) with full ordering.
690 * Safe to use in noinstr code; prefer atomic_long_dec_return() elsewhere.
692 * Return: The updated value of @v.
694 static __always_inline long
695 raw_atomic_long_dec_return(atomic_long_t *v)
698 return raw_atomic64_dec_return(v);
700 return raw_atomic_dec_return(v);
705 * raw_atomic_long_dec_return_acquire() - atomic decrement with acquire ordering
706 * @v: pointer to atomic_long_t
708 * Atomically updates @v to (@v - 1) with acquire ordering.
710 * Safe to use in noinstr code; prefer atomic_long_dec_return_acquire() elsewhere.
712 * Return: The updated value of @v.
714 static __always_inline long
715 raw_atomic_long_dec_return_acquire(atomic_long_t *v)
718 return raw_atomic64_dec_return_acquire(v);
720 return raw_atomic_dec_return_acquire(v);
725 * raw_atomic_long_dec_return_release() - atomic decrement with release ordering
726 * @v: pointer to atomic_long_t
728 * Atomically updates @v to (@v - 1) with release ordering.
730 * Safe to use in noinstr code; prefer atomic_long_dec_return_release() elsewhere.
732 * Return: The updated value of @v.
734 static __always_inline long
735 raw_atomic_long_dec_return_release(atomic_long_t *v)
738 return raw_atomic64_dec_return_release(v);
740 return raw_atomic_dec_return_release(v);
745 * raw_atomic_long_dec_return_relaxed() - atomic decrement with relaxed ordering
746 * @v: pointer to atomic_long_t
748 * Atomically updates @v to (@v - 1) with relaxed ordering.
750 * Safe to use in noinstr code; prefer atomic_long_dec_return_relaxed() elsewhere.
752 * Return: The updated value of @v.
754 static __always_inline long
755 raw_atomic_long_dec_return_relaxed(atomic_long_t *v)
758 return raw_atomic64_dec_return_relaxed(v);
760 return raw_atomic_dec_return_relaxed(v);
765 * raw_atomic_long_fetch_dec() - atomic decrement with full ordering
766 * @v: pointer to atomic_long_t
768 * Atomically updates @v to (@v - 1) with full ordering.
770 * Safe to use in noinstr code; prefer atomic_long_fetch_dec() elsewhere.
772 * Return: The original value of @v.
774 static __always_inline long
775 raw_atomic_long_fetch_dec(atomic_long_t *v)
778 return raw_atomic64_fetch_dec(v);
780 return raw_atomic_fetch_dec(v);
785 * raw_atomic_long_fetch_dec_acquire() - atomic decrement with acquire ordering
786 * @v: pointer to atomic_long_t
788 * Atomically updates @v to (@v - 1) with acquire ordering.
790 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_acquire() elsewhere.
792 * Return: The original value of @v.
794 static __always_inline long
795 raw_atomic_long_fetch_dec_acquire(atomic_long_t *v)
798 return raw_atomic64_fetch_dec_acquire(v);
800 return raw_atomic_fetch_dec_acquire(v);
805 * raw_atomic_long_fetch_dec_release() - atomic decrement with release ordering
806 * @v: pointer to atomic_long_t
808 * Atomically updates @v to (@v - 1) with release ordering.
810 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_release() elsewhere.
812 * Return: The original value of @v.
814 static __always_inline long
815 raw_atomic_long_fetch_dec_release(atomic_long_t *v)
818 return raw_atomic64_fetch_dec_release(v);
820 return raw_atomic_fetch_dec_release(v);
825 * raw_atomic_long_fetch_dec_relaxed() - atomic decrement with relaxed ordering
826 * @v: pointer to atomic_long_t
828 * Atomically updates @v to (@v - 1) with relaxed ordering.
830 * Safe to use in noinstr code; prefer atomic_long_fetch_dec_relaxed() elsewhere.
832 * Return: The original value of @v.
834 static __always_inline long
835 raw_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
838 return raw_atomic64_fetch_dec_relaxed(v);
840 return raw_atomic_fetch_dec_relaxed(v);
845 * raw_atomic_long_and() - atomic bitwise AND with relaxed ordering
847 * @v: pointer to atomic_long_t
849 * Atomically updates @v to (@v & @i) with relaxed ordering.
851 * Safe to use in noinstr code; prefer atomic_long_and() elsewhere.
855 static __always_inline void
856 raw_atomic_long_and(long i, atomic_long_t *v)
859 raw_atomic64_and(i, v);
861 raw_atomic_and(i, v);
866 * raw_atomic_long_fetch_and() - atomic bitwise AND with full ordering
868 * @v: pointer to atomic_long_t
870 * Atomically updates @v to (@v & @i) with full ordering.
872 * Safe to use in noinstr code; prefer atomic_long_fetch_and() elsewhere.
874 * Return: The original value of @v.
876 static __always_inline long
877 raw_atomic_long_fetch_and(long i, atomic_long_t *v)
880 return raw_atomic64_fetch_and(i, v);
882 return raw_atomic_fetch_and(i, v);
887 * raw_atomic_long_fetch_and_acquire() - atomic bitwise AND with acquire ordering
889 * @v: pointer to atomic_long_t
891 * Atomically updates @v to (@v & @i) with acquire ordering.
893 * Safe to use in noinstr code; prefer atomic_long_fetch_and_acquire() elsewhere.
895 * Return: The original value of @v.
897 static __always_inline long
898 raw_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
901 return raw_atomic64_fetch_and_acquire(i, v);
903 return raw_atomic_fetch_and_acquire(i, v);
908 * raw_atomic_long_fetch_and_release() - atomic bitwise AND with release ordering
910 * @v: pointer to atomic_long_t
912 * Atomically updates @v to (@v & @i) with release ordering.
914 * Safe to use in noinstr code; prefer atomic_long_fetch_and_release() elsewhere.
916 * Return: The original value of @v.
918 static __always_inline long
919 raw_atomic_long_fetch_and_release(long i, atomic_long_t *v)
922 return raw_atomic64_fetch_and_release(i, v);
924 return raw_atomic_fetch_and_release(i, v);
929 * raw_atomic_long_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
931 * @v: pointer to atomic_long_t
933 * Atomically updates @v to (@v & @i) with relaxed ordering.
935 * Safe to use in noinstr code; prefer atomic_long_fetch_and_relaxed() elsewhere.
937 * Return: The original value of @v.
939 static __always_inline long
940 raw_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
943 return raw_atomic64_fetch_and_relaxed(i, v);
945 return raw_atomic_fetch_and_relaxed(i, v);
950 * raw_atomic_long_andnot() - atomic bitwise AND NOT with relaxed ordering
952 * @v: pointer to atomic_long_t
954 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
956 * Safe to use in noinstr code; prefer atomic_long_andnot() elsewhere.
960 static __always_inline void
961 raw_atomic_long_andnot(long i, atomic_long_t *v)
964 raw_atomic64_andnot(i, v);
966 raw_atomic_andnot(i, v);
971 * raw_atomic_long_fetch_andnot() - atomic bitwise AND NOT with full ordering
973 * @v: pointer to atomic_long_t
975 * Atomically updates @v to (@v & ~@i) with full ordering.
977 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot() elsewhere.
979 * Return: The original value of @v.
981 static __always_inline long
982 raw_atomic_long_fetch_andnot(long i, atomic_long_t *v)
985 return raw_atomic64_fetch_andnot(i, v);
987 return raw_atomic_fetch_andnot(i, v);
992 * raw_atomic_long_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
994 * @v: pointer to atomic_long_t
996 * Atomically updates @v to (@v & ~@i) with acquire ordering.
998 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_acquire() elsewhere.
1000 * Return: The original value of @v.
1002 static __always_inline long
1003 raw_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
1006 return raw_atomic64_fetch_andnot_acquire(i, v);
1008 return raw_atomic_fetch_andnot_acquire(i, v);
1013 * raw_atomic_long_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
1015 * @v: pointer to atomic_long_t
1017 * Atomically updates @v to (@v & ~@i) with release ordering.
1019 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_release() elsewhere.
1021 * Return: The original value of @v.
1023 static __always_inline long
1024 raw_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
1027 return raw_atomic64_fetch_andnot_release(i, v);
1029 return raw_atomic_fetch_andnot_release(i, v);
1034 * raw_atomic_long_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
1036 * @v: pointer to atomic_long_t
1038 * Atomically updates @v to (@v & ~@i) with relaxed ordering.
1040 * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_relaxed() elsewhere.
1042 * Return: The original value of @v.
1044 static __always_inline long
1045 raw_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
1048 return raw_atomic64_fetch_andnot_relaxed(i, v);
1050 return raw_atomic_fetch_andnot_relaxed(i, v);
1055 * raw_atomic_long_or() - atomic bitwise OR with relaxed ordering
1057 * @v: pointer to atomic_long_t
1059 * Atomically updates @v to (@v | @i) with relaxed ordering.
1061 * Safe to use in noinstr code; prefer atomic_long_or() elsewhere.
1065 static __always_inline void
1066 raw_atomic_long_or(long i, atomic_long_t *v)
1069 raw_atomic64_or(i, v);
1071 raw_atomic_or(i, v);
1076 * raw_atomic_long_fetch_or() - atomic bitwise OR with full ordering
1078 * @v: pointer to atomic_long_t
1080 * Atomically updates @v to (@v | @i) with full ordering.
1082 * Safe to use in noinstr code; prefer atomic_long_fetch_or() elsewhere.
1084 * Return: The original value of @v.
1086 static __always_inline long
1087 raw_atomic_long_fetch_or(long i, atomic_long_t *v)
1090 return raw_atomic64_fetch_or(i, v);
1092 return raw_atomic_fetch_or(i, v);
1097 * raw_atomic_long_fetch_or_acquire() - atomic bitwise OR with acquire ordering
1099 * @v: pointer to atomic_long_t
1101 * Atomically updates @v to (@v | @i) with acquire ordering.
1103 * Safe to use in noinstr code; prefer atomic_long_fetch_or_acquire() elsewhere.
1105 * Return: The original value of @v.
1107 static __always_inline long
1108 raw_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
1111 return raw_atomic64_fetch_or_acquire(i, v);
1113 return raw_atomic_fetch_or_acquire(i, v);
1118 * raw_atomic_long_fetch_or_release() - atomic bitwise OR with release ordering
1120 * @v: pointer to atomic_long_t
1122 * Atomically updates @v to (@v | @i) with release ordering.
1124 * Safe to use in noinstr code; prefer atomic_long_fetch_or_release() elsewhere.
1126 * Return: The original value of @v.
1128 static __always_inline long
1129 raw_atomic_long_fetch_or_release(long i, atomic_long_t *v)
1132 return raw_atomic64_fetch_or_release(i, v);
1134 return raw_atomic_fetch_or_release(i, v);
1139 * raw_atomic_long_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
1141 * @v: pointer to atomic_long_t
1143 * Atomically updates @v to (@v | @i) with relaxed ordering.
1145 * Safe to use in noinstr code; prefer atomic_long_fetch_or_relaxed() elsewhere.
1147 * Return: The original value of @v.
1149 static __always_inline long
1150 raw_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
1153 return raw_atomic64_fetch_or_relaxed(i, v);
1155 return raw_atomic_fetch_or_relaxed(i, v);
1160 * raw_atomic_long_xor() - atomic bitwise XOR with relaxed ordering
1162 * @v: pointer to atomic_long_t
1164 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
1166 * Safe to use in noinstr code; prefer atomic_long_xor() elsewhere.
1170 static __always_inline void
1171 raw_atomic_long_xor(long i, atomic_long_t *v)
1174 raw_atomic64_xor(i, v);
1176 raw_atomic_xor(i, v);
1181 * raw_atomic_long_fetch_xor() - atomic bitwise XOR with full ordering
1183 * @v: pointer to atomic_long_t
1185 * Atomically updates @v to (@v ^ @i) with full ordering.
1187 * Safe to use in noinstr code; prefer atomic_long_fetch_xor() elsewhere.
1189 * Return: The original value of @v.
1191 static __always_inline long
1192 raw_atomic_long_fetch_xor(long i, atomic_long_t *v)
1195 return raw_atomic64_fetch_xor(i, v);
1197 return raw_atomic_fetch_xor(i, v);
1202 * raw_atomic_long_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
1204 * @v: pointer to atomic_long_t
1206 * Atomically updates @v to (@v ^ @i) with acquire ordering.
1208 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_acquire() elsewhere.
1210 * Return: The original value of @v.
1212 static __always_inline long
1213 raw_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
1216 return raw_atomic64_fetch_xor_acquire(i, v);
1218 return raw_atomic_fetch_xor_acquire(i, v);
1223 * raw_atomic_long_fetch_xor_release() - atomic bitwise XOR with release ordering
1225 * @v: pointer to atomic_long_t
1227 * Atomically updates @v to (@v ^ @i) with release ordering.
1229 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_release() elsewhere.
1231 * Return: The original value of @v.
1233 static __always_inline long
1234 raw_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
1237 return raw_atomic64_fetch_xor_release(i, v);
1239 return raw_atomic_fetch_xor_release(i, v);
1244 * raw_atomic_long_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
1246 * @v: pointer to atomic_long_t
1248 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
1250 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_relaxed() elsewhere.
1252 * Return: The original value of @v.
1254 static __always_inline long
1255 raw_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
1258 return raw_atomic64_fetch_xor_relaxed(i, v);
1260 return raw_atomic_fetch_xor_relaxed(i, v);
1265 * raw_atomic_long_xchg() - atomic exchange with full ordering
1266 * @v: pointer to atomic_long_t
1267 * @new: long value to assign
1269 * Atomically updates @v to @new with full ordering.
1271 * Safe to use in noinstr code; prefer atomic_long_xchg() elsewhere.
1273 * Return: The original value of @v.
1275 static __always_inline long
1276 raw_atomic_long_xchg(atomic_long_t *v, long new)
1279 return raw_atomic64_xchg(v, new);
1281 return raw_atomic_xchg(v, new);
1286 * raw_atomic_long_xchg_acquire() - atomic exchange with acquire ordering
1287 * @v: pointer to atomic_long_t
1288 * @new: long value to assign
1290 * Atomically updates @v to @new with acquire ordering.
1292 * Safe to use in noinstr code; prefer atomic_long_xchg_acquire() elsewhere.
1294 * Return: The original value of @v.
1296 static __always_inline long
1297 raw_atomic_long_xchg_acquire(atomic_long_t *v, long new)
1300 return raw_atomic64_xchg_acquire(v, new);
1302 return raw_atomic_xchg_acquire(v, new);
1307 * raw_atomic_long_xchg_release() - atomic exchange with release ordering
1308 * @v: pointer to atomic_long_t
1309 * @new: long value to assign
1311 * Atomically updates @v to @new with release ordering.
1313 * Safe to use in noinstr code; prefer atomic_long_xchg_release() elsewhere.
1315 * Return: The original value of @v.
1317 static __always_inline long
1318 raw_atomic_long_xchg_release(atomic_long_t *v, long new)
1321 return raw_atomic64_xchg_release(v, new);
1323 return raw_atomic_xchg_release(v, new);
1328 * raw_atomic_long_xchg_relaxed() - atomic exchange with relaxed ordering
1329 * @v: pointer to atomic_long_t
1330 * @new: long value to assign
1332 * Atomically updates @v to @new with relaxed ordering.
1334 * Safe to use in noinstr code; prefer atomic_long_xchg_relaxed() elsewhere.
1336 * Return: The original value of @v.
1338 static __always_inline long
1339 raw_atomic_long_xchg_relaxed(atomic_long_t *v, long new)
1342 return raw_atomic64_xchg_relaxed(v, new);
1344 return raw_atomic_xchg_relaxed(v, new);
1349 * raw_atomic_long_cmpxchg() - atomic compare and exchange with full ordering
1350 * @v: pointer to atomic_long_t
1351 * @old: long value to compare with
1352 * @new: long value to assign
1354 * If (@v == @old), atomically updates @v to @new with full ordering.
1356 * Safe to use in noinstr code; prefer atomic_long_cmpxchg() elsewhere.
1358 * Return: The original value of @v.
1360 static __always_inline long
1361 raw_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
1364 return raw_atomic64_cmpxchg(v, old, new);
1366 return raw_atomic_cmpxchg(v, old, new);
1371 * raw_atomic_long_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
1372 * @v: pointer to atomic_long_t
1373 * @old: long value to compare with
1374 * @new: long value to assign
1376 * If (@v == @old), atomically updates @v to @new with acquire ordering.
1378 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_acquire() elsewhere.
1380 * Return: The original value of @v.
1382 static __always_inline long
1383 raw_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1386 return raw_atomic64_cmpxchg_acquire(v, old, new);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1388 return raw_atomic_cmpxchg_acquire(v, old, new);
1393 * raw_atomic_long_cmpxchg_release() - atomic compare and exchange with release ordering
1394 * @v: pointer to atomic_long_t
1395 * @old: long value to compare with
1396 * @new: long value to assign
1398 * If (@v == @old), atomically updates @v to @new with release ordering.
1400 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_release() elsewhere.
1402 * Return: The original value of @v.
1404 static __always_inline long
1405 raw_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1408 return raw_atomic64_cmpxchg_release(v, old, new);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1410 return raw_atomic_cmpxchg_release(v, old, new);
1415 * raw_atomic_long_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
1416 * @v: pointer to atomic_long_t
1417 * @old: long value to compare with
1418 * @new: long value to assign
1420 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
1422 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_relaxed() elsewhere.
1424 * Return: The original value of @v.
1426 static __always_inline long
1427 raw_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1430 return raw_atomic64_cmpxchg_relaxed(v, old, new);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1432 return raw_atomic_cmpxchg_relaxed(v, old, new);
1437 * raw_atomic_long_try_cmpxchg() - atomic compare and exchange with full ordering
1438 * @v: pointer to atomic_long_t
1439 * @old: pointer to long value to compare with
1440 * @new: long value to assign
1442 * If (@v == @old), atomically updates @v to @new with full ordering.
1443 * Otherwise, updates @old to the current value of @v.
1445 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg() elsewhere.
1447 * Return: @true if the exchange occurred, @false otherwise.
1449 static __always_inline bool
1450 raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
/* 64-bit: long and s64 have identical representation, so the cast of @old is safe */
1453 return raw_atomic64_try_cmpxchg(v, (s64 *)old, new);
/* 32-bit: long and int have identical representation, so the cast of @old is safe */
1455 return raw_atomic_try_cmpxchg(v, (int *)old, new);
1460 * raw_atomic_long_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
1461 * @v: pointer to atomic_long_t
1462 * @old: pointer to long value to compare with
1463 * @new: long value to assign
1465 * If (@v == @old), atomically updates @v to @new with acquire ordering.
1466 * Otherwise, updates @old to the current value of @v.
1468 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_acquire() elsewhere.
1470 * Return: @true if the exchange occurred, @false otherwise.
1472 static __always_inline bool
1473 raw_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
/* 64-bit: long and s64 have identical representation, so the cast of @old is safe */
1476 return raw_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
/* 32-bit: long and int have identical representation, so the cast of @old is safe */
1478 return raw_atomic_try_cmpxchg_acquire(v, (int *)old, new);
1483 * raw_atomic_long_try_cmpxchg_release() - atomic compare and exchange with release ordering
1484 * @v: pointer to atomic_long_t
1485 * @old: pointer to long value to compare with
1486 * @new: long value to assign
1488 * If (@v == @old), atomically updates @v to @new with release ordering.
1489 * Otherwise, updates @old to the current value of @v.
1491 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_release() elsewhere.
1493 * Return: @true if the exchange occurred, @false otherwise.
1495 static __always_inline bool
1496 raw_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
/* 64-bit: long and s64 have identical representation, so the cast of @old is safe */
1499 return raw_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
/* 32-bit: long and int have identical representation, so the cast of @old is safe */
1501 return raw_atomic_try_cmpxchg_release(v, (int *)old, new);
1506 * raw_atomic_long_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
1507 * @v: pointer to atomic_long_t
1508 * @old: pointer to long value to compare with
1509 * @new: long value to assign
1511 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
1512 * Otherwise, updates @old to the current value of @v.
1514 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_relaxed() elsewhere.
1516 * Return: @true if the exchange occurred, @false otherwise.
1518 static __always_inline bool
1519 raw_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
/* 64-bit: long and s64 have identical representation, so the cast of @old is safe */
1522 return raw_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
/* 32-bit: long and int have identical representation, so the cast of @old is safe */
1524 return raw_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
1529 * raw_atomic_long_sub_and_test() - atomic subtract and test if zero with full ordering
1530 * @i: long value to subtract
1531 * @v: pointer to atomic_long_t
1533 * Atomically updates @v to (@v - @i) with full ordering.
1535 * Safe to use in noinstr code; prefer atomic_long_sub_and_test() elsewhere.
1537 * Return: @true if the resulting value of @v is zero, @false otherwise.
1539 static __always_inline bool
1540 raw_atomic_long_sub_and_test(long i, atomic_long_t *v)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1543 return raw_atomic64_sub_and_test(i, v);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1545 return raw_atomic_sub_and_test(i, v);
1550 * raw_atomic_long_dec_and_test() - atomic decrement and test if zero with full ordering
1551 * @v: pointer to atomic_long_t
1553 * Atomically updates @v to (@v - 1) with full ordering.
1555 * Safe to use in noinstr code; prefer atomic_long_dec_and_test() elsewhere.
1557 * Return: @true if the resulting value of @v is zero, @false otherwise.
1559 static __always_inline bool
1560 raw_atomic_long_dec_and_test(atomic_long_t *v)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1563 return raw_atomic64_dec_and_test(v);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1565 return raw_atomic_dec_and_test(v);
1570 * raw_atomic_long_inc_and_test() - atomic increment and test if zero with full ordering
1571 * @v: pointer to atomic_long_t
1573 * Atomically updates @v to (@v + 1) with full ordering.
1575 * Safe to use in noinstr code; prefer atomic_long_inc_and_test() elsewhere.
1577 * Return: @true if the resulting value of @v is zero, @false otherwise.
1579 static __always_inline bool
1580 raw_atomic_long_inc_and_test(atomic_long_t *v)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1583 return raw_atomic64_inc_and_test(v);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1585 return raw_atomic_inc_and_test(v);
1590 * raw_atomic_long_add_negative() - atomic add and test if negative with full ordering
1591 * @i: long value to add
1592 * @v: pointer to atomic_long_t
1594 * Atomically updates @v to (@v + @i) with full ordering.
1596 * Safe to use in noinstr code; prefer atomic_long_add_negative() elsewhere.
1598 * Return: @true if the resulting value of @v is negative, @false otherwise.
1600 static __always_inline bool
1601 raw_atomic_long_add_negative(long i, atomic_long_t *v)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1604 return raw_atomic64_add_negative(i, v);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1606 return raw_atomic_add_negative(i, v);
1611 * raw_atomic_long_add_negative_acquire() - atomic add and test if negative with acquire ordering
1612 * @i: long value to add
1613 * @v: pointer to atomic_long_t
1615 * Atomically updates @v to (@v + @i) with acquire ordering.
1617 * Safe to use in noinstr code; prefer atomic_long_add_negative_acquire() elsewhere.
1619 * Return: @true if the resulting value of @v is negative, @false otherwise.
1621 static __always_inline bool
1622 raw_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1625 return raw_atomic64_add_negative_acquire(i, v);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1627 return raw_atomic_add_negative_acquire(i, v);
1632 * raw_atomic_long_add_negative_release() - atomic add and test if negative with release ordering
1633 * @i: long value to add
1634 * @v: pointer to atomic_long_t
1636 * Atomically updates @v to (@v + @i) with release ordering.
1638 * Safe to use in noinstr code; prefer atomic_long_add_negative_release() elsewhere.
1640 * Return: @true if the resulting value of @v is negative, @false otherwise.
1642 static __always_inline bool
1643 raw_atomic_long_add_negative_release(long i, atomic_long_t *v)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1646 return raw_atomic64_add_negative_release(i, v);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1648 return raw_atomic_add_negative_release(i, v);
1653 * raw_atomic_long_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
1654 * @i: long value to add
1655 * @v: pointer to atomic_long_t
1657 * Atomically updates @v to (@v + @i) with relaxed ordering.
1659 * Safe to use in noinstr code; prefer atomic_long_add_negative_relaxed() elsewhere.
1661 * Return: @true if the resulting value of @v is negative, @false otherwise.
1663 static __always_inline bool
1664 raw_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1667 return raw_atomic64_add_negative_relaxed(i, v);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1669 return raw_atomic_add_negative_relaxed(i, v);
1674 * raw_atomic_long_fetch_add_unless() - atomic add unless value with full ordering
1675 * @v: pointer to atomic_long_t
1676 * @a: long value to add
1677 * @u: long value to compare with
1679 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
1681 * Safe to use in noinstr code; prefer atomic_long_fetch_add_unless() elsewhere.
1683 * Return: The original value of @v.
1685 static __always_inline long
1686 raw_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1689 return raw_atomic64_fetch_add_unless(v, a, u);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1691 return raw_atomic_fetch_add_unless(v, a, u);
1696 * raw_atomic_long_add_unless() - atomic add unless value with full ordering
1697 * @v: pointer to atomic_long_t
1698 * @a: long value to add
1699 * @u: long value to compare with
1701 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
1703 * Safe to use in noinstr code; prefer atomic_long_add_unless() elsewhere.
1705 * Return: @true if @v was updated, @false otherwise.
1707 static __always_inline bool
1708 raw_atomic_long_add_unless(atomic_long_t *v, long a, long u)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1711 return raw_atomic64_add_unless(v, a, u);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1713 return raw_atomic_add_unless(v, a, u);
1718 * raw_atomic_long_inc_not_zero() - atomic increment unless zero with full ordering
1719 * @v: pointer to atomic_long_t
1721 * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
1723 * Safe to use in noinstr code; prefer atomic_long_inc_not_zero() elsewhere.
1725 * Return: @true if @v was updated, @false otherwise.
1727 static __always_inline bool
1728 raw_atomic_long_inc_not_zero(atomic_long_t *v)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1731 return raw_atomic64_inc_not_zero(v);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1733 return raw_atomic_inc_not_zero(v);
1738 * raw_atomic_long_inc_unless_negative() - atomic increment unless negative with full ordering
1739 * @v: pointer to atomic_long_t
1741 * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
1743 * Safe to use in noinstr code; prefer atomic_long_inc_unless_negative() elsewhere.
1745 * Return: @true if @v was updated, @false otherwise.
1747 static __always_inline bool
1748 raw_atomic_long_inc_unless_negative(atomic_long_t *v)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1751 return raw_atomic64_inc_unless_negative(v);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1753 return raw_atomic_inc_unless_negative(v);
1758 * raw_atomic_long_dec_unless_positive() - atomic decrement unless positive with full ordering
1759 * @v: pointer to atomic_long_t
1761 * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
1763 * Safe to use in noinstr code; prefer atomic_long_dec_unless_positive() elsewhere.
1765 * Return: @true if @v was updated, @false otherwise.
1767 static __always_inline bool
1768 raw_atomic_long_dec_unless_positive(atomic_long_t *v)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1771 return raw_atomic64_dec_unless_positive(v);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1773 return raw_atomic_dec_unless_positive(v);
1778 * raw_atomic_long_dec_if_positive() - atomic decrement if positive with full ordering
1779 * @v: pointer to atomic_long_t
1781 * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
1783 * Safe to use in noinstr code; prefer atomic_long_dec_if_positive() elsewhere.
1785 * Return: The old value of (@v - 1), regardless of whether @v was updated.
1787 static __always_inline long
1788 raw_atomic_long_dec_if_positive(atomic_long_t *v)
/* atomic_long_t is atomic64_t (64-bit kernels, see typedef above): delegate */
1791 return raw_atomic64_dec_if_positive(v);
/* atomic_long_t is atomic_t (32-bit kernels): delegate */
1793 return raw_atomic_dec_if_positive(v);
1797 #endif /* _LINUX_ATOMIC_LONG_H */
1798 // 4ef23f98c73cff96d239896175fd26b10b88899e