summaryrefslogtreecommitdiff
path: root/include/asm-generic/atomic.h
blob: 22142c71d35a102e186e40f9033cbb40325c8389 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Generic C implementation of atomic counter operations. Do not include in
 * machine independent code.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#ifdef CONFIG_SMP

/* we can build all atomic primitives from cmpxchg */

/*
 * Generate generic_atomic_<op>(): apply "counter = counter c_op i" with a
 * classic compare-and-exchange retry loop.  A racing update makes
 * arch_cmpxchg() return a value other than the one we expected, in which
 * case we adopt that value and try again.
 */
#define ATOMIC_OP(op, c_op)						\
static inline void generic_atomic_##op(int i, atomic_t *v)		\
{									\
	int val = v->counter;						\
									\
	for (;;) {							\
		int seen = arch_cmpxchg(&v->counter, val, val c_op i);	\
									\
		if (seen == val)					\
			break;	/* exchange succeeded */		\
		val = seen;	/* lost a race; retry with new value */	\
	}								\
}

/*
 * Generate generic_atomic_<op>_return(): like ATOMIC_OP(), but return the
 * post-operation value, i.e. the result of "val c_op i" computed from the
 * value that the successful exchange actually replaced.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int generic_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	int val = v->counter;						\
									\
	for (;;) {							\
		int seen = arch_cmpxchg(&v->counter, val, val c_op i);	\
									\
		if (seen == val)					\
			break;	/* exchange succeeded */		\
		val = seen;	/* lost a race; retry with new value */	\
	}								\
									\
	return val c_op i;						\
}

/*
 * Generate generic_atomic_fetch_<op>(): like ATOMIC_OP(), but return the
 * value the counter held immediately before the operation was applied
 * (fetch-then-op semantics).
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int generic_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int val = v->counter;						\
									\
	for (;;) {							\
		int seen = arch_cmpxchg(&v->counter, val, val c_op i);	\
									\
		if (seen == val)					\
			break;	/* exchange succeeded */		\
		val = seen;	/* lost a race; retry with new value */	\
	}								\
									\
	return val;							\
}

#else

#include <linux/irqflags.h>

/*
 * Generate generic_atomic_<op>() for UP: with only one CPU, disabling
 * local interrupts around the read-modify-write is enough to make it
 * atomic with respect to everything that could interleave.
 */
#define ATOMIC_OP(op, c_op)						\
static inline void generic_atomic_##op(int i, atomic_t *v)		\
{									\
	unsigned long irq_state;					\
									\
	raw_local_irq_save(irq_state);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(irq_state);				\
}

/*
 * Generate generic_atomic_<op>_return() for UP: perform the update with
 * interrupts disabled and hand back the new (post-operation) value.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int generic_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	unsigned long irq_state;					\
	int result;							\
									\
	raw_local_irq_save(irq_state);					\
	result = v->counter c_op i;					\
	v->counter = result;						\
	raw_local_irq_restore(irq_state);				\
									\
	return result;							\
}

/*
 * Generate generic_atomic_fetch_<op>() for UP: perform the update with
 * interrupts disabled and hand back the old (pre-operation) value.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int generic_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long irq_state;					\
	int prev;							\
									\
	raw_local_irq_save(irq_state);					\
	prev = v->counter;						\
	v->counter = prev c_op i;					\
	raw_local_irq_restore(irq_state);				\
									\
	return prev;							\
}

#endif /* CONFIG_SMP */

/*
 * Instantiate the generic helpers for each supported operation.  Note the
 * deliberate asymmetry: only add/sub get the *_return variants, while all
 * five ops (add/sub/and/or/xor) get fetch_* and plain void forms.
 */
ATOMIC_OP_RETURN(add, +)
ATOMIC_OP_RETURN(sub, -)

ATOMIC_FETCH_OP(add, +)
ATOMIC_FETCH_OP(sub, -)
ATOMIC_FETCH_OP(and, &)
ATOMIC_FETCH_OP(or, |)
ATOMIC_FETCH_OP(xor, ^)

ATOMIC_OP(add, +)
ATOMIC_OP(sub, -)
ATOMIC_OP(and, &)
ATOMIC_OP(or, |)
ATOMIC_OP(xor, ^)

/* The generator macros are local to this header; drop them after use. */
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * Wire the generic implementations up as this architecture's arch_atomic_*
 * entry points, which the higher-level atomic machinery builds upon.
 */
#define arch_atomic_add_return			generic_atomic_add_return
#define arch_atomic_sub_return			generic_atomic_sub_return

#define arch_atomic_fetch_add			generic_atomic_fetch_add
#define arch_atomic_fetch_sub			generic_atomic_fetch_sub
#define arch_atomic_fetch_and			generic_atomic_fetch_and
#define arch_atomic_fetch_or			generic_atomic_fetch_or
#define arch_atomic_fetch_xor			generic_atomic_fetch_xor

#define arch_atomic_add				generic_atomic_add
#define arch_atomic_sub				generic_atomic_sub
#define arch_atomic_and				generic_atomic_and
#define arch_atomic_or				generic_atomic_or
#define arch_atomic_xor				generic_atomic_xor

/*
 * Plain loads/stores suffice for read/set; READ_ONCE/WRITE_ONCE prevent
 * the compiler from tearing, fusing, or re-reading the access.
 */
#define arch_atomic_read(v)			READ_ONCE((v)->counter)
#define arch_atomic_set(v, i)			WRITE_ONCE(((v)->counter), (i))

#endif /* __ASM_GENERIC_ATOMIC_H */