/*
 * Generic barrier definitions.
 *
 * It should be possible to use these on really simple architectures,
 * but it serves more as a starting point for new ports.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef __ASM_GENERIC_BARRIER_H
#define __ASM_GENERIC_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/compiler.h>

/* Architectures may provide a real no-op instruction; fall back to inline asm. */
#ifndef nop
#define nop()	asm volatile ("nop")
#endif

/*
 * Force strict CPU ordering. And yes, this is required on UP too when we're
 * talking to devices.
 *
 * Fall back to compiler barriers if nothing better is provided.
 */

#ifndef mb
#define mb()	barrier()
#endif

/* Read/write barriers default to the full barrier when not overridden. */
#ifndef rmb
#define rmb()	mb()
#endif

#ifndef wmb
#define wmb()	mb()
#endif

/* DMA barriers default to the plain read/write barriers when not overridden. */
#ifndef dma_rmb
#define dma_rmb()	rmb()
#endif

#ifndef dma_wmb
#define dma_wmb()	wmb()
#endif

/* Data-dependency barrier: a no-op everywhere except Alpha-style ports. */
#ifndef read_barrier_depends
#define read_barrier_depends()		do { } while (0)
#endif

/*
 * The __smp_*() variants are what architectures override; the generic
 * fallbacks simply reuse the mandatory barriers above.
 */
#ifndef __smp_mb
#define __smp_mb()	mb()
#endif

#ifndef __smp_rmb
#define __smp_rmb()	rmb()
#endif

#ifndef __smp_wmb
#define __smp_wmb()	wmb()
#endif

#ifndef __smp_read_barrier_depends
#define __smp_read_barrier_depends()	read_barrier_depends()
#endif

/*
 * SMP build: the public smp_*() barriers map onto the (possibly
 * arch-overridden) __smp_*() implementations above.
 */
#ifdef CONFIG_SMP

#ifndef smp_mb
#define smp_mb()	__smp_mb()
#endif

#ifndef smp_rmb
#define smp_rmb()	__smp_rmb()
#endif

#ifndef smp_wmb
#define smp_wmb()	__smp_wmb()
#endif

#ifndef smp_read_barrier_depends
#define smp_read_barrier_depends()	__smp_read_barrier_depends()
#endif

#else	/* !CONFIG_SMP */

/* UP build: no inter-CPU ordering needed, a compiler barrier suffices. */
#ifndef smp_mb
#define smp_mb()	barrier()
#endif

#ifndef smp_rmb
#define smp_rmb()	barrier()
#endif

#ifndef smp_wmb
#define smp_wmb()	barrier()
#endif

#ifndef smp_read_barrier_depends
#define smp_read_barrier_depends()	do { } while (0)
#endif

#endif	/* CONFIG_SMP */

/* Arch-overridable building blocks for the composite smp_*() helpers below. */
#ifndef __smp_store_mb
#define __smp_store_mb(var, value)  do { WRITE_ONCE(var, value); __smp_mb(); } while (0)
#endif

#ifndef __smp_mb__before_atomic
#define __smp_mb__before_atomic()	__smp_mb()
#endif

#ifndef __smp_mb__after_atomic
#define __smp_mb__after_atomic()	__smp_mb()
#endif

/* Store-release: prior accesses are ordered before the store of @v to *@p. */
#ifndef __smp_store_release
#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	__smp_mb();							\
	WRITE_ONCE(*p, v);						\
} while (0)
#endif

/* Load-acquire: the load of *@p is ordered before all subsequent accesses. */
#ifndef __smp_load_acquire
#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	__smp_mb();							\
	___p1;								\
})
#endif

/*
 * SMP build: the composite smp_*() helpers map straight onto the
 * __smp_*() variants defined above (which an arch may have overridden).
 */
#ifdef CONFIG_SMP

#ifndef smp_store_mb
#define smp_store_mb(var, value)  __smp_store_mb(var, value)
#endif

#ifndef smp_mb__before_atomic
#define smp_mb__before_atomic()	__smp_mb__before_atomic()
#endif

#ifndef smp_mb__after_atomic
#define smp_mb__after_atomic()	__smp_mb__after_atomic()
#endif

#ifndef smp_store_release
#define smp_store_release(p, v) __smp_store_release(p, v)
#endif

#ifndef smp_load_acquire
#define smp_load_acquire(p) __smp_load_acquire(p)
#endif

#else	/* !CONFIG_SMP */

/* UP build: compiler barriers are sufficient for the composite helpers. */
#ifndef smp_store_mb
#define smp_store_mb(var, value)  do { WRITE_ONCE(var, value); barrier(); } while (0)
#endif

#ifndef smp_mb__before_atomic
#define smp_mb__before_atomic()	barrier()
#endif

#ifndef smp_mb__after_atomic
#define smp_mb__after_atomic()	barrier()
#endif

#ifndef smp_store_release
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	WRITE_ONCE(*p, v);						\
} while (0)
#endif

#ifndef smp_load_acquire
#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})
#endif

#endif	/* CONFIG_SMP */

/* Barriers for virtual machine guests when talking to an SMP host */
#define virt_mb() __smp_mb()
#define virt_rmb() __smp_rmb()
#define virt_wmb() __smp_wmb()
#define virt_read_barrier_depends() __smp_read_barrier_depends()
#define virt_store_mb(var, value) __smp_store_mb(var, value)
#define virt_mb__before_atomic() __smp_mb__before_atomic()
#define virt_mb__after_atomic()	__smp_mb__after_atomic()
#define virt_store_release(p, v) __smp_store_release(p, v)
#define virt_load_acquire(p) __smp_load_acquire(p)

/**
 * smp_acquire__after_ctrl_dep() - Provide ACQUIRE ordering after a control dependency
 *
 * A control dependency provides a LOAD->STORE order, the additional RMB
 * provides LOAD->LOAD order, together they provide LOAD->{LOAD,STORE} order,
 * aka. (load)-ACQUIRE.
 *
 * Architectures that do not do load speculation can have this be barrier().
 */
#ifndef smp_acquire__after_ctrl_dep
#define smp_acquire__after_ctrl_dep()		smp_rmb()
#endif

/**
 * smp_cond_load_relaxed() - (Spin) wait for cond with no ordering guarantees
 * @ptr: pointer to the variable to wait on
 * @cond: boolean expression to wait for
 *
 * Equivalent to using READ_ONCE() on the condition variable.
 *
 * Due to C lacking lambda expressions we load the value of *ptr into a
 * pre-named variable @VAL to be used in @cond.
 */
#ifndef smp_cond_load_relaxed
#define smp_cond_load_relaxed(ptr, cond_expr) ({		\
	typeof(ptr) __PTR = (ptr);				\
	typeof(*ptr) VAL;					\
	for (;;) {						\
		VAL = READ_ONCE(*__PTR);			\
		if (cond_expr)					\
			break;					\
		cpu_relax();					\
	}							\
	VAL;							\
})
#endif

/**
 * smp_cond_load_acquire() - (Spin) wait for cond with ACQUIRE ordering
 * @ptr: pointer to the variable to wait on
 * @cond: boolean expression to wait for
 *
 * Equivalent to using smp_load_acquire() on the condition variable but employs
 * the control dependency of the wait to reduce the barrier on many platforms.
 */
#ifndef smp_cond_load_acquire
#define smp_cond_load_acquire(ptr, cond_expr) ({		\
	typeof(*ptr) _val;					\
	_val = smp_cond_load_relaxed(ptr, cond_expr);		\
	smp_acquire__after_ctrl_dep();				\
	_val;							\
})
#endif

#endif /* !__ASSEMBLY__ */
#endif /* __ASM_GENERIC_BARRIER_H */