@@ -37,62 +37,62 @@ static __inline__ void arch_atomic_set(atomic_t *v, int i)
__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m<>" (v->counter) : "r" (i));
}

- #define ATOMIC_OP(op, asm_op) \
+ #define ATOMIC_OP(op, asm_op, suffix, sign, ...) \
static __inline__ void arch_atomic_##op(int a, atomic_t *v) \
{ \
int t; \
\
__asm__ __volatile__( \
"1: lwarx %0,0,%3 # atomic_" #op "\n" \
- #asm_op " %0,%2,%0 \n" \
+ #asm_op "%I2" suffix " %0,%0,%2 \n" \
" stwcx. %0,0,%3 \n" \
" bne- 1b\n" \
: "=&r" (t), "+m" (v->counter) \
- : "r" (a), "r" (&v->counter) \
- : "cc"); \
+ : "r"#sign (a), "r" (&v->counter) \
+ : "cc", ##__VA_ARGS__); \
} \

- #define ATOMIC_OP_RETURN_RELAXED(op, asm_op) \
+ #define ATOMIC_OP_RETURN_RELAXED(op, asm_op, suffix, sign, ...) \
static inline int arch_atomic_##op##_return_relaxed(int a, atomic_t *v) \
{ \
int t; \
\
__asm__ __volatile__( \
"1: lwarx %0,0,%3 # atomic_" #op "_return_relaxed\n" \
- #asm_op " %0,%2,%0 \n" \
+ #asm_op "%I2" suffix " %0,%0,%2 \n" \
" stwcx. %0,0,%3\n" \
" bne- 1b\n" \
: "=&r" (t), "+m" (v->counter) \
- : "r" (a), "r" (&v->counter) \
- : "cc"); \
+ : "r"#sign (a), "r" (&v->counter) \
+ : "cc", ##__VA_ARGS__); \
\
return t; \
}

- #define ATOMIC_FETCH_OP_RELAXED(op, asm_op) \
+ #define ATOMIC_FETCH_OP_RELAXED(op, asm_op, suffix, sign, ...) \
static inline int arch_atomic_fetch_##op##_relaxed(int a, atomic_t *v) \
{ \
int res, t; \
\
__asm__ __volatile__( \
"1: lwarx %0,0,%4 # atomic_fetch_" #op "_relaxed\n" \
- #asm_op " %1,%3,%0 \n" \
+ #asm_op "%I3" suffix " %1,%0,%3 \n" \
" stwcx. %1,0,%4\n" \
" bne- 1b\n" \
: "=&r" (res), "=&r" (t), "+m" (v->counter) \
- : "r" (a), "r" (&v->counter) \
- : "cc"); \
+ : "r"#sign (a), "r" (&v->counter) \
+ : "cc", ##__VA_ARGS__); \
\
return res; \
}

- #define ATOMIC_OPS(op, asm_op) \
- ATOMIC_OP(op, asm_op) \
- ATOMIC_OP_RETURN_RELAXED(op, asm_op) \
- ATOMIC_FETCH_OP_RELAXED(op, asm_op)
+ #define ATOMIC_OPS(op, asm_op, suffix, sign, ...) \
+ ATOMIC_OP(op, asm_op, suffix, sign, ##__VA_ARGS__) \
+ ATOMIC_OP_RETURN_RELAXED(op, asm_op, suffix, sign, ##__VA_ARGS__) \
+ ATOMIC_FETCH_OP_RELAXED(op, asm_op, suffix, sign, ##__VA_ARGS__)

- ATOMIC_OPS(add, add)
- ATOMIC_OPS(sub, subf)
+ ATOMIC_OPS(add, add, "c", I, "xer")
+ ATOMIC_OPS(sub, sub, "c", I, "xer")

#define arch_atomic_add_return_relaxed arch_atomic_add_return_relaxed
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return_relaxed
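
Note (not part of the diff): after preprocessing, ATOMIC_OPS(add, add, "c", I, "xer") turns ATOMIC_OP into roughly the function below. The pasted constraint "r" "I" becomes "rI", so GCC may pass the operand a in a register or, when it is a signed 16-bit constant, as an immediate; the "%I2" output modifier then inserts an "i" into the mnemonic, so the loop assembles to addc or addic. The carry-recording suffix "c" is used presumably because plain addi treats a source register of r0 as the constant 0, and it is also why "xer" joins the clobber list.

/* Approximate expansion only; the typedef is a local stand-in matching the
 * kernel's atomic_t definition.
 */
typedef struct { int counter; } atomic_t;

static __inline__ void arch_atomic_add(int a, atomic_t *v)
{
        int t;

        __asm__ __volatile__(
"1:     lwarx   %0,0,%3         # atomic_add\n"
        "add" "%I2" "c" " %0,%0,%2 \n"          /* assembles to addc (register) or addic (immediate) */
"       stwcx.  %0,0,%3 \n"
"       bne-    1b\n"
        : "=&r" (t), "+m" (v->counter)
        : "r" "I" (a), "r" (&v->counter)        /* "rI": register or signed 16-bit constant */
        : "cc", "xer");                         /* addic/addc update the carry bit in XER */
}
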
@@ -101,13 +101,13 @@ ATOMIC_OPS(sub, subf)
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub_relaxed

#undef ATOMIC_OPS
- #define ATOMIC_OPS(op, asm_op) \
- ATOMIC_OP(op, asm_op) \
- ATOMIC_FETCH_OP_RELAXED(op, asm_op)
+ #define ATOMIC_OPS(op, asm_op, suffix, sign) \
+ ATOMIC_OP(op, asm_op, suffix, sign) \
+ ATOMIC_FETCH_OP_RELAXED(op, asm_op, suffix, sign)

- ATOMIC_OPS(and, and)
- ATOMIC_OPS(or, or)
- ATOMIC_OPS(xor, xor)
+ ATOMIC_OPS(and, and, ".", K)
+ ATOMIC_OPS(or, or, "", K)
+ ATOMIC_OPS(xor, xor, "", K)

#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or_relaxed
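
Note (not part of the diff): the bitwise ops pass the "K" constraint (an unsigned 16-bit constant) and no extra clobber. The "and" op additionally passes the "." suffix because the only and-immediate instruction is the record form andi. (its cr0 update is already covered by the "cc" clobber), whereas ori and xori exist as plain immediate forms. A hypothetical usage sketch, assuming this header, with the instruction each call can now assemble to:

/* Hypothetical caller; the comments show the single instruction the rK/rI
 * constraints and the %I modifier let the compiler pick for each call.
 */
static atomic_t flags = ATOMIC_INIT(0);

static void flags_example(int runtime_val)
{
        arch_atomic_or(0x0100, &flags);        /* constant fits "K"          -> ori   */
        arch_atomic_and(0x00ff, &flags);       /* record-form immediate and  -> andi. */
        arch_atomic_xor(runtime_val, &flags);  /* not a compile-time value   -> xor   */
        arch_atomic_add(1, &flags);            /* fits "I", carry form       -> addic */
}
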
@@ -241,15 +241,15 @@ static __inline__ int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
"1: lwarx %0,0,%1 # atomic_fetch_add_unless\n\
cmpw 0,%0,%3 \n\
beq 2f \n\
- add %0,%2,%0 \n"
+ add%I2c %0,%0,%2 \n"
" stwcx. %0,0,%1 \n\
bne- 1b \n"
PPC_ATOMIC_EXIT_BARRIER
- " subf %0,%2,%0 \n\
+ " sub%I2c %0,%0,%2 \n\
2:"
: "=&r" (t)
- : "r" (&v->counter), "r" (a), "r" (u)
- : "cc", "memory");
+ : "r" (&v->counter), "rI" (a), "r" (u)
+ : "cc", "memory", "xer");

return t;
}
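
Note (not part of the diff): arch_atomic_fetch_add_unless() gets the same treatment. With the "rI" constraint a constant addend (such as the 1 used by atomic_inc_not_zero()) folds into addic, and the matching sub%I2c on the success path recovers the old value the function must return. A standalone C sketch of the algorithm the asm implements, with the lwarx/stwcx. reservation semantics and the entry/exit barriers elided:

/* Sketch only: models the loop with a compare-and-swap instead of the real
 * load-reserve/store-conditional pair.
 */
typedef struct { int counter; } atomic_t;

static inline int fetch_add_unless_sketch(atomic_t *v, int a, int u)
{
        int old = __atomic_load_n(&v->counter, __ATOMIC_RELAXED);

        do {
                if (old == u)           /* "cmpw ... beq 2f": leave the counter untouched */
                        break;
        } while (!__atomic_compare_exchange_n(&v->counter, &old, old + a,   /* add%I2c + stwcx. */
                                              0, __ATOMIC_RELAXED, __ATOMIC_RELAXED));

        return old;                     /* the asm derives this via sub%I2c after a successful store */
}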