/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_BARRIER_H
#define _ASM_X86_BARRIER_H

#include <asm/alternative.h>
#include <asm/nops.h>

/*
 * Force strict CPU ordering.
 * And yes, this might be required on UP too when we're talking
 * to devices.
 */

#ifdef CONFIG_X86_32
#define mb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "mfence", \
				      X86_FEATURE_XMM2) ::: "memory", "cc")
#define rmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "lfence", \
				       X86_FEATURE_XMM2) ::: "memory", "cc")
#define wmb() asm volatile(ALTERNATIVE("lock; addl $0,-4(%%esp)", "sfence", \
				       X86_FEATURE_XMM2) ::: "memory", "cc")
#else
#define mb()	asm volatile("mfence" ::: "memory")
#define rmb()	asm volatile("lfence" ::: "memory")
#define wmb()	asm volatile("sfence" ::: "memory")
#endif
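
/*
 * Note: on 32-bit, the barriers above default to a locked add to a
 * dead stack slot, which acts as a full barrier on all x86 CPUs, and
 * are patched via ALTERNATIVE() to the fence instructions only once
 * the CPU advertises SSE2 (X86_FEATURE_XMM2), since the fences are
 * not available on older processors.
 */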

/**
 * array_index_mask_nospec() - generate a mask that is ~0UL when the
 *	bounds check succeeds and 0 otherwise
 * @index: array element index
 * @size: number of elements in array
 *
 * Returns:
 *	0 - (index < size)
 */
static inline unsigned long array_index_mask_nospec(unsigned long index,
						    unsigned long size)
{
	unsigned long mask;

	asm volatile ("cmp %1,%2; sbb %0,%0;"
			:"=r" (mask)
			:"g"(size),"r" (index)
			:"cc");
	return mask;
}

/* Override the default implementation from linux/nospec.h. */
#define array_index_mask_nospec array_index_mask_nospec
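
/*
 * Usage sketch (illustrative, not part of this header): callers go
 * through array_index_nospec() from <linux/nospec.h>, which ANDs this
 * mask into the index under a bounds check, roughly:
 *
 *	if (index < size) {
 *		index = array_index_nospec(index, size);
 *		val = array[index];
 *	}
 *
 * so a mispredicted bounds check forces the index to 0 instead of
 * letting speculation read an attacker-controlled out-of-bounds slot.
 */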

/* Prevent speculative execution past this barrier. */
#define barrier_nospec() alternative_2("", "mfence", X86_FEATURE_MFENCE_RDTSC, \
					   "lfence", X86_FEATURE_LFENCE_RDTSC)
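
/*
 * Typical use (illustrative): stop speculation before acting on a
 * user-controlled value once a check has passed, e.g.
 *
 *	if (idx < nr_entries) {
 *		barrier_nospec();
 *		val = table[idx];
 *	}
 *
 * Where the access pattern allows it, array_index_nospec() above is
 * the cheaper option, since it avoids a serializing fence.
 */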

#define dma_rmb()	barrier()
#define dma_wmb()	barrier()

#ifdef CONFIG_X86_32
#define __smp_mb()	asm volatile("lock; addl $0,-4(%%esp)" ::: "memory", "cc")
#else
#define __smp_mb()	asm volatile("lock; addl $0,-4(%%rsp)" ::: "memory", "cc")
#endif
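
/*
 * __smp_mb() deliberately uses "lock; addl" rather than mfence even
 * where mfence is available: a locked operation is architecturally a
 * full barrier and is cheaper on common CPUs, while mfence's extra
 * ordering of non-temporal accesses is not needed here.
 */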
#define __smp_rmb()	dma_rmb()
#define __smp_wmb()	barrier()
#define __smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)
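
/*
 * Note: xchg with a memory operand carries an implicit LOCK prefix,
 * so __smp_store_mb() gets the store and a full barrier from a single
 * instruction.
 */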

#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})
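
/*
 * Pairing sketch (illustrative): a writer publishes data with a
 * release store and a reader consumes it with an acquire load:
 *
 *	CPU 0					CPU 1
 *	data = 42;				if (smp_load_acquire(&ready))
 *	smp_store_release(&ready, 1);			val = data;
 *
 * On x86's strongly ordered (TSO) model both sides compile to plain
 * MOVs; only the compiler barrier() in the macros above is needed.
 */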

/* Atomic operations are already serializing on x86 */
#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()

#include <asm-generic/barrier.h>

#endif /* _ASM_X86_BARRIER_H */