Greg Kroah-Hartman | b244131 | 2017-11-01 15:07:57 +0100 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0 */ |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 2 | #ifndef _ASM_X86_ATOMIC64_32_H |
| 3 | #define _ASM_X86_ATOMIC64_32_H |
| 4 | |
| 5 | #include <linux/compiler.h> |
| 6 | #include <linux/types.h> |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 7 | //#include <asm/cmpxchg.h> |
| 8 | |
/* A 64-bit atomic type for 32-bit x86. */

typedef struct {
	/*
	 * 8-byte alignment so the counter can be accessed as one
	 * naturally aligned 64-bit quantity (CMPXCHG8B operand).
	 */
	u64 __aligned(8) counter;
} atomic64_t;

/* Static initializer, e.g.: atomic64_t x = ATOMIC64_INIT(0); */
#define ATOMIC64_INIT(val) { (val) }
| 16 | |
/*
 * Declare one out-of-line atomic64 helper.  The helpers use a
 * non-standard, register-based calling convention, hence the
 * deliberately vague (atomic64_t *, ...) prototype.  When
 * ATOMIC64_EXPORT is defined, the declaration also exports the symbol.
 */
#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif

#ifdef CONFIG_X86_CMPXCHG64
/* CMPXCHG8B is known to exist: always call the _cx8 implementation. */
#define __alternative_atomic64(f, g, out, in...) \
	asm volatile("call %P[func]" \
		     : out : [func] "i" (atomic64_##g##_cx8), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
/*
 * CMPXCHG8B may be absent at runtime: runtime-patch between the _386
 * fallback (f) and the _cx8 implementation (g) on X86_FEATURE_CX8.
 */
#define __alternative_atomic64(f, g, out, in...) \
	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
			 X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
	ATOMIC64_DECL_ONE(sym##_386)

/* _386 fallbacks for ops whose cx8 counterpart is the *_return form. */
ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif

/* Common case: fallback and cx8 helper share the same name @f. */
#define alternative_atomic64(f, out, in...) \
	__alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)

/* Declare every out-of-line helper used by the inlines below. */
ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

/* The declaration machinery is local to this header. */
#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 63 | |
/**
 * arch_atomic64_cmpxchg - cmpxchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @o: expected value
 * @n: new value
 *
 * Atomically sets @v to @n if it was equal to @o and returns
 * the old value.
 */

static inline long long arch_atomic64_cmpxchg(atomic64_t *v, long long o,
					      long long n)
{
	/* Delegate to the generic 64-bit compare-and-exchange primitive. */
	return arch_cmpxchg64(&v->counter, o, n);
}
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 79 | |
| 80 | /** |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 81 | * arch_atomic64_xchg - xchg atomic64 variable |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 82 | * @v: pointer to type atomic64_t |
| 83 | * @n: value to assign |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 84 | * |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 85 | * Atomically xchgs the value of @v to @n and returns |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 86 | * the old value. |
| 87 | */ |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 88 | static inline long long arch_atomic64_xchg(atomic64_t *v, long long n) |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 89 | { |
| 90 | long long o; |
| 91 | unsigned high = (unsigned)(n >> 32); |
| 92 | unsigned low = (unsigned)n; |
Jan Beulich | 819165f | 2012-01-20 16:21:41 +0000 | [diff] [blame] | 93 | alternative_atomic64(xchg, "=&A" (o), |
| 94 | "S" (v), "b" (low), "c" (high) |
| 95 | : "memory"); |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 96 | return o; |
| 97 | } |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 98 | |
| 99 | /** |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 100 | * arch_atomic64_set - set atomic64 variable |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 101 | * @v: pointer to type atomic64_t |
Philipp Hahn | 1f04597 | 2012-05-02 18:09:35 +0200 | [diff] [blame] | 102 | * @i: value to assign |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 103 | * |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 104 | * Atomically sets the value of @v to @n. |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 105 | */ |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 106 | static inline void arch_atomic64_set(atomic64_t *v, long long i) |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 107 | { |
| 108 | unsigned high = (unsigned)(i >> 32); |
| 109 | unsigned low = (unsigned)i; |
Jan Beulich | 819165f | 2012-01-20 16:21:41 +0000 | [diff] [blame] | 110 | alternative_atomic64(set, /* no output */, |
| 111 | "S" (v), "b" (low), "c" (high) |
| 112 | : "eax", "edx", "memory"); |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 113 | } |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 114 | |
| 115 | /** |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 116 | * arch_atomic64_read - read atomic64 variable |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 117 | * @v: pointer to type atomic64_t |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 118 | * |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 119 | * Atomically reads the value of @v and returns it. |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 120 | */ |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 121 | static inline long long arch_atomic64_read(const atomic64_t *v) |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 122 | { |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 123 | long long r; |
Jan Beulich | 819165f | 2012-01-20 16:21:41 +0000 | [diff] [blame] | 124 | alternative_atomic64(read, "=&A" (r), "c" (v) : "memory"); |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 125 | return r; |
Joe Perches | 447a564 | 2018-03-21 15:09:32 -0700 | [diff] [blame] | 126 | } |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 127 | |
/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 */
static inline long long arch_atomic64_add_return(long long i, atomic64_t *v)
{
	/* @i is in/out in edx:eax ("A"); @v is passed (and clobbered) in ecx. */
	alternative_atomic64(add_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 142 | |
/*
 * Other variants with different arithmetic operators:
 */

/* Atomically subtracts @i from @v and returns the resulting value. */
static inline long long arch_atomic64_sub_return(long long i, atomic64_t *v)
{
	/* Same register contract as arch_atomic64_add_return(). */
	alternative_atomic64(sub_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
| 153 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 154 | static inline long long arch_atomic64_inc_return(atomic64_t *v) |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 155 | { |
| 156 | long long a; |
Jan Beulich | 819165f | 2012-01-20 16:21:41 +0000 | [diff] [blame] | 157 | alternative_atomic64(inc_return, "=&A" (a), |
| 158 | "S" (v) : "memory", "ecx"); |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 159 | return a; |
| 160 | } |
Mark Rutland | 9837559 | 2018-06-21 13:13:19 +0100 | [diff] [blame] | 161 | #define arch_atomic64_inc_return arch_atomic64_inc_return |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 162 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 163 | static inline long long arch_atomic64_dec_return(atomic64_t *v) |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 164 | { |
| 165 | long long a; |
Jan Beulich | 819165f | 2012-01-20 16:21:41 +0000 | [diff] [blame] | 166 | alternative_atomic64(dec_return, "=&A" (a), |
| 167 | "S" (v) : "memory", "ecx"); |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 168 | return a; |
| 169 | } |
Mark Rutland | 9837559 | 2018-06-21 13:13:19 +0100 | [diff] [blame] | 170 | #define arch_atomic64_dec_return arch_atomic64_dec_return |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 171 | |
/**
 * arch_atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline long long arch_atomic64_add(long long i, atomic64_t *v)
{
	/*
	 * f=add selects the _386 fallback, g=add_return the cx8 helper;
	 * either way the updated value comes back in @i and is returned
	 * as a byproduct.
	 */
	__alternative_atomic64(add, add_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 186 | |
/**
 * arch_atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline long long arch_atomic64_sub(long long i, atomic64_t *v)
{
	/* Mirrors arch_atomic64_add(): _386 fallback "sub", cx8 "sub_return". */
	__alternative_atomic64(sub, sub_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 201 | |
/**
 * arch_atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void arch_atomic64_inc(atomic64_t *v)
{
	/* @v in esi; helper clobbers eax/ecx/edx, no C-visible output. */
	__alternative_atomic64(inc, inc_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_inc arch_atomic64_inc
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 214 | |
/**
 * arch_atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void arch_atomic64_dec(atomic64_t *v)
{
	/* @v in esi; helper clobbers eax/ecx/edx, no C-visible output. */
	__alternative_atomic64(dec, dec_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_dec arch_atomic64_dec
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 227 | |
| 228 | /** |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 229 | * arch_atomic64_add_unless - add unless the number is a given value |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 230 | * @v: pointer of type atomic64_t |
| 231 | * @a: the amount to add to v... |
| 232 | * @u: ...unless v is equal to u. |
| 233 | * |
| 234 | * Atomically adds @a to @v, so long as it was not @u. |
Jan Beulich | 819165f | 2012-01-20 16:21:41 +0000 | [diff] [blame] | 235 | * Returns non-zero if the add was done, zero otherwise. |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 236 | */ |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 237 | static inline int arch_atomic64_add_unless(atomic64_t *v, long long a, |
| 238 | long long u) |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 239 | { |
| 240 | unsigned low = (unsigned)u; |
| 241 | unsigned high = (unsigned)(u >> 32); |
Jan Beulich | 819165f | 2012-01-20 16:21:41 +0000 | [diff] [blame] | 242 | alternative_atomic64(add_unless, |
Jan Beulich | cb8095b | 2012-01-20 16:22:04 +0000 | [diff] [blame] | 243 | ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)), |
| 244 | "S" (v) : "memory"); |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 245 | return (int)a; |
| 246 | } |
| 247 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 248 | static inline int arch_atomic64_inc_not_zero(atomic64_t *v) |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 249 | { |
| 250 | int r; |
Jan Beulich | 819165f | 2012-01-20 16:21:41 +0000 | [diff] [blame] | 251 | alternative_atomic64(inc_not_zero, "=&a" (r), |
| 252 | "S" (v) : "ecx", "edx", "memory"); |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 253 | return r; |
| 254 | } |
Randy Dunlap | 4331f4d | 2018-09-02 19:30:53 -0700 | [diff] [blame] | 255 | #define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 256 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 257 | static inline long long arch_atomic64_dec_if_positive(atomic64_t *v) |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 258 | { |
| 259 | long long r; |
Jan Beulich | 819165f | 2012-01-20 16:21:41 +0000 | [diff] [blame] | 260 | alternative_atomic64(dec_if_positive, "=&A" (r), |
| 261 | "S" (v) : "ecx", "memory"); |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 262 | return r; |
| 263 | } |
Randy Dunlap | 4331f4d | 2018-09-02 19:30:53 -0700 | [diff] [blame] | 264 | #define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive |
Luca Barbieri | a7e926a | 2010-02-24 10:54:25 +0100 | [diff] [blame] | 265 | |
/* The call-emitting helper macros are private to this header. */
#undef alternative_atomic64
#undef __alternative_atomic64
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 268 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 269 | static inline void arch_atomic64_and(long long i, atomic64_t *v) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 270 | { |
| 271 | long long old, c = 0; |
| 272 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 273 | while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 274 | c = old; |
Peter Zijlstra | 7fc1845 | 2014-04-23 20:28:37 +0200 | [diff] [blame] | 275 | } |
| 276 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 277 | static inline long long arch_atomic64_fetch_and(long long i, atomic64_t *v) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 278 | { |
| 279 | long long old, c = 0; |
| 280 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 281 | while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 282 | c = old; |
| 283 | |
| 284 | return old; |
Peter Zijlstra | a8bccca | 2016-04-18 01:16:03 +0200 | [diff] [blame] | 285 | } |
Peter Zijlstra | 7fc1845 | 2014-04-23 20:28:37 +0200 | [diff] [blame] | 286 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 287 | static inline void arch_atomic64_or(long long i, atomic64_t *v) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 288 | { |
| 289 | long long old, c = 0; |
| 290 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 291 | while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 292 | c = old; |
| 293 | } |
| 294 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 295 | static inline long long arch_atomic64_fetch_or(long long i, atomic64_t *v) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 296 | { |
| 297 | long long old, c = 0; |
| 298 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 299 | while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 300 | c = old; |
| 301 | |
| 302 | return old; |
| 303 | } |
| 304 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 305 | static inline void arch_atomic64_xor(long long i, atomic64_t *v) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 306 | { |
| 307 | long long old, c = 0; |
| 308 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 309 | while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 310 | c = old; |
| 311 | } |
| 312 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 313 | static inline long long arch_atomic64_fetch_xor(long long i, atomic64_t *v) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 314 | { |
| 315 | long long old, c = 0; |
| 316 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 317 | while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 318 | c = old; |
| 319 | |
| 320 | return old; |
| 321 | } |
| 322 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 323 | static inline long long arch_atomic64_fetch_add(long long i, atomic64_t *v) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 324 | { |
| 325 | long long old, c = 0; |
| 326 | |
Dmitry Vyukov | 8bf705d | 2018-01-29 18:26:05 +0100 | [diff] [blame] | 327 | while ((old = arch_atomic64_cmpxchg(v, c, c + i)) != c) |
Dmitry Vyukov | ba1c9f8 | 2017-06-17 11:15:27 +0200 | [diff] [blame] | 328 | c = old; |
| 329 | |
| 330 | return old; |
| 331 | } |
Peter Zijlstra | a8bccca | 2016-04-18 01:16:03 +0200 | [diff] [blame] | 332 | |
/* fetch_sub is simply fetch_add of the negated operand. */
#define arch_atomic64_fetch_sub(i, v)	arch_atomic64_fetch_add(-(i), (v))
Peter Zijlstra | a8bccca | 2016-04-18 01:16:03 +0200 | [diff] [blame] | 334 | |
Brian Gerst | 1a3b1d8 | 2010-01-07 11:53:33 -0500 | [diff] [blame] | 335 | #endif /* _ASM_X86_ATOMIC64_32_H */ |