
- x86: atomic_add
- sparc64: atomic_cmpxchg, atomic_add

Andrei Pelinescu-Onciul authored on 08/05/2007 19:41:59
Showing 3 changed files
... ...
@@ -38,6 +38,7 @@
  * History:
  * --------
  *  2006-03-28  created by andrei
+ *  2007-05-08  added atomic_add and atomic_cmpxchg (andrei)
  */
 
 
... ...
@@ -121,6 +122,34 @@
 		return RET_EXPR; \
 	}
 
+/* same as above, but uses a short 1 op sequence
+ * %2 (or %1) is var, %0 is v and the return value (ret) */
+#define ATOMIC_FUNC_DECL1_RAW(NAME, OP, P_TYPE, RET_TYPE, RET_EXPR) \
+	inline static RET_TYPE atomic_##NAME##_##P_TYPE (volatile P_TYPE *var, \
+															P_TYPE v) \
+	{ \
+		P_TYPE ret; \
+		asm volatile( \
+			OP "\n\t" \
+			: "=&r"(ret), "=m"(*var) : "r"(var), "0"(v) : "cc" \
+			); \
+		return RET_EXPR; \
+	}
+
+/* same as above, but takes two extra params, v1 and v2, and uses a
+ * short 1 op sequence:
+ * %2 (or %1) is var, %3 is v1 and %0 is v2 & result (ret) */
+#define ATOMIC_FUNC_DECL2_CAS(NAME, OP, P_TYPE, RET_TYPE, RET_EXPR) \
+	inline static RET_TYPE atomic_##NAME##_##P_TYPE (volatile P_TYPE *var, \
+													P_TYPE v1, P_TYPE v2) \
+	{ \
+		P_TYPE ret; \
+		asm volatile( \
+			OP "\n\t" \
+			: "=&r"(ret), "=m"(*var) : "r"(var), "r"(v1), "0"(v2) : "cc" \
+			); \
+		return RET_EXPR; \
+	}
 
 
 
... ...
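For reference, a minimal sketch of what the cmpxchg instantiation of
ATOMIC_FUNC_DECL2_CAS (added in the next hunk) expands to, with the macro
arguments substituted by hand: cas compares the word at [var] with v1 and
stores v2 only if they match; either way %0 ends up holding the old memory
value, which is what the function returns.

inline static int atomic_cmpxchg_int(volatile int *var, int v1, int v2)
{
	int ret;
	asm volatile(
		"cas  [%2], %3, %0" "\n\t"
		: "=&r"(ret), "=m"(*var) : "r"(var), "r"(v1), "0"(v2) : "cc"
		);
	return ret; /* previous value of *var; equals v1 on success */
}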
@@ -130,7 +159,11 @@ ATOMIC_FUNC_DECL1(and,     "and  %0, %4, %1", int, void, /* no return */ )
 ATOMIC_FUNC_DECL1(or,      "or   %0, %4, %1", int, void, /* no return */ )
 ATOMIC_FUNC_DECL(inc_and_test, "add   %0, 1, %1", int, int, ((ret+1)==0) )
 ATOMIC_FUNC_DECL(dec_and_test, "sub   %0, 1, %1", int, int, ((ret-1)==0) )
-ATOMIC_FUNC_DECL1(get_and_set, "mov %4, %1" , int, int,  ret)
+/* swap is deprecated (SPARCv9), but probably better than CAS for futexes */
+ATOMIC_FUNC_DECL1_RAW(get_and_set, "swap [%2], %0", int, int, ret)
+/*ATOMIC_FUNC_DECL1(get_and_set, "mov %4, %1" , int, int,  ret)*/
+ATOMIC_FUNC_DECL1(add,     "add  %0, %4, %1", int, int,  ret+v)
+ATOMIC_FUNC_DECL2_CAS(cmpxchg, "cas  [%2], %3, %0", int, int,  ret)
 
 
 ATOMIC_FUNC_DECL(inc,      "add  %0,  1, %1", long, void, /* no return */ )
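The usual way to use the new cmpxchg primitive is a read/compare-and-swap
retry loop; the helper below is purely illustrative (it is not part of this
commit):

/* atomically OR a flag into *p; retries if another CPU modified *p
 * between the read and the cas; returns the pre-update value */
static int set_flag(volatile int *p, int flag)
{
	int old, prev;

	do {
		old = *p;
		prev = atomic_cmpxchg_int(p, old, old | flag);
	} while (prev != old); /* lost the race => retry */
	return old;
}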
... ...
@@ -140,6 +173,12 @@ ATOMIC_FUNC_DECL1(or,      "or   %0, %4, %1", long, void, /* no return */ )
 ATOMIC_FUNC_DECL(inc_and_test, "add   %0, 1, %1", long, long, ((ret+1)==0) )
 ATOMIC_FUNC_DECL(dec_and_test, "sub   %0, 1, %1", long, long, ((ret-1)==0) )
 ATOMIC_FUNC_DECL1(get_and_set, "mov %4, %1" , long, long,  ret)
+ATOMIC_FUNC_DECL1(add,     "add  %0, %4, %1", long, long,  ret+v)
+#ifdef SPARC64_MODE
+ATOMIC_FUNC_DECL2_CAS(cmpxchg, "casx  [%2], %3, %0", long, long,  ret)
+#else
+ATOMIC_FUNC_DECL2_CAS(cmpxchg, "cas   [%2], %3, %0", long, long,  ret)
+#endif
 
 
 #define atomic_inc(var) atomic_inc_int(&(var)->val)
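The casx used under SPARC64_MODE is the 64 bit form of cas, matching the
width of long in that mode; plain cas operates on 32 bit words. Similarly,
the swap based get_and_set above expands to roughly the following sketch:
swap exchanges register %0 (preloaded with v via the "0"(v) constraint)
with the word at [var], so the old value comes back in ret.

inline static int atomic_get_and_set_int(volatile int *var, int v)
{
	int ret;
	asm volatile(
		"swap [%2], %0" "\n\t"
		: "=&r"(ret), "=m"(*var) : "r"(var), "0"(v) : "cc"
		);
	return ret; /* value *var held before the exchange */
}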
... ...
@@ -149,6 +188,10 @@ ATOMIC_FUNC_DECL1(get_and_set, "mov %4, %1" , long, long,  ret)
 #define atomic_dec_and_test(var) atomic_dec_and_test_int(&(var)->val)
 #define atomic_inc_and_test(var) atomic_inc_and_test_int(&(var)->val)
 #define atomic_get_and_set(var, i) atomic_get_and_set_int(&(var)->val, i)
+#define atomic_add(var, i) atomic_add_int(&(var)->val, i)
+#define atomic_cmpxchg(var, old, new_v) \
+	atomic_cmpxchg_int(&(var)->val, old, new_v)
+
 
 
 /* with integrated membar */
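From the caller's side the new wrappers behave like the existing ones,
operating on atomic_t; a small usage sketch (atomic_set and atomic_get are
assumed to be the accessors already provided by this interface):

/* illustrative only */
atomic_t refcnt;
int r;

atomic_set(&refcnt, 7);
r = atomic_add(&refcnt, 2);  /* refcnt.val is now 9; r == 9 (new value) */
if (atomic_cmpxchg(&refcnt, 9, 0) == 9) {
	/* we observed 9 and atomically replaced it with 0 */
}

Note that atomic_add returns the post-add value (ret+v), which the test
program changes further down rely on.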
... ...
@@ -43,6 +43,7 @@
  * --------
  *  2006-03-08  created by andrei
  *  2007-05-07  added cmpxchg (andrei)
+ *  2007-05-08  added atomic_add (andrei)
  */
 
 #ifndef _atomic_x86_h
... ...
@@ -184,6 +185,18 @@
 		return ret; \
 	}
 
+/* similar to XCHG, but with a LOCK prefix, relaxed constraints and a
+ * different return value */
+#define ATOMIC_FUNC_XADD(NAME, OP, TYPE) \
+	inline static TYPE atomic_##NAME##_##TYPE(volatile TYPE* var, TYPE v) \
+{ \
+	TYPE ret; \
+	asm volatile( \
+			__LOCK_PREF " " OP " \n\t" \
+			: "=r"(ret), "=m"(*var) : "m"(*var), "0"(v) : "cc", "memory" \
+			); \
+	return ret+v; \
+}
+
 ATOMIC_FUNC_DECL1(inc, "incl %0", int)
 ATOMIC_FUNC_DECL1(dec, "decl %0", int)
 ATOMIC_FUNC_DECL2(and, "andl %1, %0", int)
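Substituting the int instantiation that follows, the macro generates
roughly this sketch: lock xadd atomically exchanges ret with *var and
stores their sum in *var, so ret receives the pre-add value and ret+v is
the value that ended up in memory.

inline static int atomic_add_int(volatile int *var, int v)
{
	int ret;
	asm volatile(
			__LOCK_PREF " xaddl %0, %1 \n\t"
			: "=r"(ret), "=m"(*var) : "m"(*var), "0"(v) : "cc", "memory"
			);
	return ret + v; /* new value, matching the sparc64 ret+v convention */
}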
... ...
@@ -192,6 +205,7 @@ ATOMIC_FUNC_TEST(inc_and_test, "incl %0", int, int)
 ATOMIC_FUNC_TEST(dec_and_test, "decl %0", int, int)
 ATOMIC_FUNC_XCHG(get_and_set,  "xchgl %1, %0", int)
 ATOMIC_FUNC_CMPXCHG(cmpxchg, "cmpxchgl %2, %1", int , int)
+ATOMIC_FUNC_XADD(add, "xaddl %0, %1", int)
 #ifdef __CPU_x86_64
 ATOMIC_FUNC_DECL1(inc, "incq %0", long)
 ATOMIC_FUNC_DECL1(dec, "decq %0", long)
... ...
@@ -201,6 +215,7 @@ ATOMIC_FUNC_TEST(inc_and_test, "incq %0", long, int)
 ATOMIC_FUNC_TEST(dec_and_test, "decq %0", long, int)
 ATOMIC_FUNC_XCHG(get_and_set,  "xchgq %1, %0", long)
 ATOMIC_FUNC_CMPXCHG(cmpxchg, "cmpxchgq %2, %1", long , long)
+ATOMIC_FUNC_XADD(add, "xaddq %0, %1", long)
 #else
 ATOMIC_FUNC_DECL1(inc, "incl %0", long)
 ATOMIC_FUNC_DECL1(dec, "decl %0", long)
... ...
@@ -210,6 +225,7 @@ ATOMIC_FUNC_TEST(inc_and_test, "incl %0", long, int)
 ATOMIC_FUNC_TEST(dec_and_test, "decl %0", long, int)
 ATOMIC_FUNC_XCHG(get_and_set,  "xchgl %1, %0", long)
 ATOMIC_FUNC_CMPXCHG(cmpxchg, "cmpxchgl %2, %1", long , long)
+ATOMIC_FUNC_XADD(add, "xaddl %0, %1", long)
 #endif
 
 #define atomic_inc(var) atomic_inc_int(&(var)->val)
... ...
@@ -221,6 +237,7 @@ ATOMIC_FUNC_CMPXCHG(cmpxchg, "cmpxchgl %2, %1", long , long)
 #define atomic_get_and_set(var, i) atomic_get_and_set_int(&(var)->val, i)
 #define atomic_cmpxchg(var, old, newv) \
 		atomic_cmpxchg_int(&(var)->val, old, newv)
+#define atomic_add(var, v) atomic_add_int(&(var)->val, v)
 
 
 #ifdef NOSMP
... ...
@@ -134,6 +134,7 @@ static char* flags=
 #define at_or	AT_DECL(or)
 #define at_get_and_set	AT_DECL(get_and_set)
 #define at_cmpxchg	AT_DECL(cmpxchg)
+#define at_add	AT_DECL(add)
 
 
 #define CHECK_ERR(txt, x, y) \
... ...
@@ -215,7 +216,7 @@ int main(int argc, char** argv)
 	printf(" atomic_and, v should be 2 ............. %2d\n", (int)at_get(v));
 	
 	VERIFY(at_or(v, 5), 7);
-	printf(" atomic_or,  v should be 7 ............. %2d\n", (int)r);
+	printf(" atomic_or,  v should be 7 ............. %2d\n", (int)at_get(v));
 	VERIFY(r=at_get_and_set(v, 0), 0);
 	printf(" atomic_get_and_set, v should be 0 ..... %2d\n", (int)at_get(v));
 	VERIFY(r=at_cmpxchg(v, 0, 7), 7);
... ...
@@ -226,6 +227,14 @@ int main(int argc, char** argv)
 	CHECK_ERR(cmpxchg, r, 7);
 	printf(" atomic_cmpxchg (fail), v should be 7 .. %2d\n", (int)at_get(v));
 	printf("                        r should be 7 .. %2d\n", (int)r);
+	VERIFY(r=at_add(v, 2), 9);
+	CHECK_ERR(atomic_add, r, 9);
+	printf(" atomic_add, v should be 9 ............. %2d\n", (int)at_get(v));
+	printf("             r should be 9 ............. %2d\n", (int)r);
+	VERIFY(r=at_add(v, -10), -1);
+	CHECK_ERR(atomic_add, r, -1);
+	printf(" atomic_add, v should be -1 ............ %2d\n", (int)at_get(v));
+	printf("             r should be -1 ............ %2d\n", (int)r);
 
 	
 	printf("\ndone.\n");
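The at_add checks above pin down the return value convention: adding 2 to a
value of 7 must return 9, i.e. the post-add value. For comparison only (not
part of this commit), the same semantics expressed with the GCC __sync
builtins:

#include <stdio.h>

int main(void)
{
	int v = 7;
	int r;

	r = __sync_add_and_fetch(&v, 2);   /* returns the new value, like atomic_add */
	printf("v=%2d r=%2d\n", v, r);     /* v= 9 r= 9 */
	r = __sync_add_and_fetch(&v, -10);
	printf("v=%2d r=%2d\n", v, r);     /* v=-1 r=-1 */
	return 0;
}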