- x86: atomic_add
- sparc64: atomic_cmpxchg, atomic_add

Andrei Pelinescu-Onciul authored on 08/05/2007 19:41:59
Showing 3 changed files
... ...
@@ -38,6 +38,7 @@
  * History:
  * --------
  *  2006-03-28  created by andrei
+ *  2007-05-08  added atomic_add and atomic_cmpxchg (andrei)
  */


... ...
@@ -121,6 +122,34 @@
 		return RET_EXPR; \
 	}

+/* same as above, but uses a short 1 op sequence
+ * %2 (or %1) is var, %0 is v and the return value (ret) */
+#define ATOMIC_FUNC_DECL1_RAW(NAME, OP, P_TYPE, RET_TYPE, RET_EXPR) \
+	inline static RET_TYPE atomic_##NAME##_##P_TYPE (volatile P_TYPE *var, \
+															P_TYPE v) \
+	{ \
+		P_TYPE ret; \
+		asm volatile( \
+			OP "\n\t" \
+			: "=&r"(ret), "=m"(*var) : "r"(var), "0"(v) : "cc" \
+			); \
+		return RET_EXPR; \
+	}
+
+/* same as above, but takes two extra params, v1 and v2, and uses a
+ * short 1 op sequence:
+ * %2 (or %1) is var, %3 is v1 and %0 is v2 & result (ret) */
+#define ATOMIC_FUNC_DECL2_CAS(NAME, OP, P_TYPE, RET_TYPE, RET_EXPR) \
+	inline static RET_TYPE atomic_##NAME##_##P_TYPE (volatile P_TYPE *var, \
+													P_TYPE v1, P_TYPE v2) \
+	{ \
+		P_TYPE ret; \
+		asm volatile( \
+			OP "\n\t" \
+			: "=&r"(ret), "=m"(*var) : "r"(var), "r"(v1), "0"(v2) : "cc" \
+			); \
+		return RET_EXPR; \
+	}



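For orientation (not part of the patch): instantiated as below for the int
cmpxchg, ATOMIC_FUNC_DECL2_CAS expands to roughly the following function. The
sparc cas instruction compares the word at [var] with v1 and, only if they are
equal, stores v2 there; in both cases the register tied to %0 receives the old
memory value, which is what gets returned.

inline static int atomic_cmpxchg_int(volatile int *var, int v1, int v2)
{
	int ret;
	asm volatile(
		"cas  [%2], %3, %0" "\n\t" /* if (*var==v1) exchange *var and %0 */
		: "=&r"(ret), "=m"(*var) : "r"(var), "r"(v1), "0"(v2) : "cc"
		);
	return ret; /* old value of *var; equal to v1 on success */
}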
... ...
@@ -130,7 +159,11 @@ ATOMIC_FUNC_DECL1(and,     "and  %0, %4, %1", int, void, /* no return */ )
 ATOMIC_FUNC_DECL1(or,      "or   %0, %4, %1", int, void, /* no return */ )
 ATOMIC_FUNC_DECL(inc_and_test, "add   %0, 1, %1", int, int, ((ret+1)==0) )
 ATOMIC_FUNC_DECL(dec_and_test, "sub   %0, 1, %1", int, int, ((ret-1)==0) )
-ATOMIC_FUNC_DECL1(get_and_set, "mov %4, %1" , int, int,  ret)
+/* swap is deprecated (sparcv9), but probably better than CAS for futexes */
+ATOMIC_FUNC_DECL1_RAW(get_and_set, "swap [%2], %0", int, int, ret)
+/*ATOMIC_FUNC_DECL1(get_and_set, "mov %4, %1" , int, int,  ret)*/
+ATOMIC_FUNC_DECL1(add,     "add  %0, %4, %1", int, int,  ret+v)
+ATOMIC_FUNC_DECL2_CAS(cmpxchg, "cas  [%2], %3, %0", int, int,  ret)


 ATOMIC_FUNC_DECL(inc,      "add  %0,  1, %1", long, void, /* no return */ )
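A sketch of the expanded get_and_set (again, not part of the patch): the sparc
swap instruction atomically exchanges the register holding v with the word at
[var], so a single instruction replaces the longer retry sequence generated by
the other macros, and ret receives the previous value.

inline static int atomic_get_and_set_int(volatile int *var, int v)
{
	int ret;
	asm volatile(
		"swap [%2], %0" "\n\t" /* atomically exchange %0 with *var */
		: "=&r"(ret), "=m"(*var) : "r"(var), "0"(v) : "cc"
		);
	return ret; /* previous value of *var */
}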
... ...
@@ -140,6 +173,12 @@ ATOMIC_FUNC_DECL1(or,      "or   %0, %4, %1", long, void, /* no return */ )
 ATOMIC_FUNC_DECL(inc_and_test, "add   %0, 1, %1", long, long, ((ret+1)==0) )
 ATOMIC_FUNC_DECL(dec_and_test, "sub   %0, 1, %1", long, long, ((ret-1)==0) )
 ATOMIC_FUNC_DECL1(get_and_set, "mov %4, %1" , long, long,  ret)
+ATOMIC_FUNC_DECL1(add,     "add  %0, %4, %1", long, long,  ret+v)
+#ifdef SPARC64_MODE
+ATOMIC_FUNC_DECL2_CAS(cmpxchg, "casx  [%2], %3, %0", long, long,  ret)
+#else
+ATOMIC_FUNC_DECL2_CAS(cmpxchg, "cas   [%2], %3, %0", long, long,  ret)
+#endif


 #define atomic_inc(var) atomic_inc_int(&(var)->val)
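The #ifdef is needed because long changes width with the compilation mode: in
SPARC64_MODE long is 64 bits, so the 64-bit casx must be used, while in 32-bit
mode the plain cas already covers the whole value. For reference, a sketch of
the SPARC64_MODE expansion:

inline static long atomic_cmpxchg_long(volatile long *var, long v1, long v2)
{
	long ret;
	asm volatile(
		"casx  [%2], %3, %0" "\n\t" /* 64-bit compare-and-swap */
		: "=&r"(ret), "=m"(*var) : "r"(var), "r"(v1), "0"(v2) : "cc"
		);
	return ret;
}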
... ...
@@ -149,6 +188,10 @@ ATOMIC_FUNC_DECL1(get_and_set, "mov %4, %1" , long, long,  ret)
 #define atomic_dec_and_test(var) atomic_dec_and_test_int(&(var)->val)
 #define atomic_inc_and_test(var) atomic_inc_and_test_int(&(var)->val)
 #define atomic_get_and_set(var, i) atomic_get_and_set_int(&(var)->val, i)
+#define atomic_add(var, i) atomic_add_int(&(var)->val, i)
+#define atomic_cmpxchg(var, old, new_v) \
+	atomic_cmpxchg_int(&(var)->val, old, new_v)
+


 /* with integrated membar */
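Intended usage of the new wrappers (a sketch, assuming the usual atomic_t and
atomic_set() from the surrounding atomic ops headers):

	atomic_t a;

	atomic_set(&a, 7);
	atomic_add(&a, 2);               /* a.val == 9; returns the new value */
	if (atomic_cmpxchg(&a, 9, 0) == 9) {
		/* the returned old value matched => a.val is now 0 */
	}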
... ...
@@ -43,6 +43,7 @@
  * --------
  *  2006-03-08  created by andrei
  *  2007-05-07  added cmpxchg (andrei)
+ *  2007-05-08  added atomic_add (andrei)
 */

 #ifndef _atomic_x86_h
... ...
@@ -184,6 +185,18 @@
 		return ret; \
 	}

+/* similar to XCHG but with LOCK prefix, relaxed constraints & diff. return */
+#define ATOMIC_FUNC_XADD(NAME, OP, TYPE) \
+	inline static TYPE atomic_##NAME##_##TYPE(volatile TYPE* var, TYPE v) \
+{ \
+	TYPE ret; \
+	asm volatile( \
+			__LOCK_PREF " " OP " \n\t" \
+			: "=r"(ret), "=m"(*var) : "m"(*var), "0"(v) : "cc", "memory" \
+			); \
+	return ret+v; \
+}
+
 ATOMIC_FUNC_DECL1(inc, "incl %0", int)
 ATOMIC_FUNC_DECL1(dec, "decl %0", int)
 ATOMIC_FUNC_DECL2(and, "andl %1, %0", int)
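For reference, ATOMIC_FUNC_XADD(add, "xaddl %0, %1", int) expands to roughly
the following (a sketch): lock xadd atomically loads the old value of *var
into the register and stores old+v back to memory, so ret holds the pre-add
value and ret+v is the post-add value that atomic_add reports, matching the
sparc64 port.

inline static int atomic_add_int(volatile int *var, int v)
{
	int ret;
	asm volatile(
		__LOCK_PREF " xaddl %0, %1 \n\t" /* %0 <- old *var; *var <- old+v */
		: "=r"(ret), "=m"(*var) : "m"(*var), "0"(v) : "cc", "memory"
		);
	return ret+v; /* the post-add value */
}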
... ...
@@ -192,6 +205,7 @@ ATOMIC_FUNC_TEST(inc_and_test, "incl %0", int, int)
 ATOMIC_FUNC_TEST(dec_and_test, "decl %0", int, int)
 ATOMIC_FUNC_XCHG(get_and_set,  "xchgl %1, %0", int)
 ATOMIC_FUNC_CMPXCHG(cmpxchg, "cmpxchgl %2, %1", int , int)
+ATOMIC_FUNC_XADD(add, "xaddl %0, %1", int)
 #ifdef __CPU_x86_64
 ATOMIC_FUNC_DECL1(inc, "incq %0", long)
 ATOMIC_FUNC_DECL1(dec, "decq %0", long)
... ...
@@ -201,6 +215,7 @@ ATOMIC_FUNC_TEST(inc_and_test, "incq %0", long, int)
 ATOMIC_FUNC_TEST(dec_and_test, "decq %0", long, int)
 ATOMIC_FUNC_XCHG(get_and_set,  "xchgq %1, %0", long)
 ATOMIC_FUNC_CMPXCHG(cmpxchg, "cmpxchgq %2, %1", long , long)
+ATOMIC_FUNC_XADD(add, "xaddq %0, %1", long)
 #else
 ATOMIC_FUNC_DECL1(inc, "incl %0", long)
 ATOMIC_FUNC_DECL1(dec, "decl %0", long)
... ...
@@ -210,6 +225,7 @@ ATOMIC_FUNC_TEST(inc_and_test, "incl %0", long, int)
 ATOMIC_FUNC_TEST(dec_and_test, "decl %0", long, int)
 ATOMIC_FUNC_XCHG(get_and_set,  "xchgl %1, %0", long)
 ATOMIC_FUNC_CMPXCHG(cmpxchg, "cmpxchgl %2, %1", long , long)
+ATOMIC_FUNC_XADD(add, "xaddl %0, %1", long)
 #endif

 #define atomic_inc(var) atomic_inc_int(&(var)->val)
... ...
@@ -221,6 +237,7 @@ ATOMIC_FUNC_CMPXCHG(cmpxchg, "cmpxchgl %2, %1", long , long)
 #define atomic_get_and_set(var, i) atomic_get_and_set_int(&(var)->val, i)
 #define atomic_cmpxchg(var, old, newv) \
 		atomic_cmpxchg_int(&(var)->val, old, newv)
+#define atomic_add(var, v) atomic_add_int(&(var)->val, v)


 #ifdef NOSMP
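Since atomic_add returns the post-add value on both ports, the wrapper can be
used refcount-style; a hypothetical example:

	atomic_t refcnt;

	atomic_set(&refcnt, 1);
	if (atomic_add(&refcnt, -1) == 0) {
		/* last reference dropped */
	}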
... ...
@@ -134,6 +134,7 @@ static char* flags=
 #define at_or	AT_DECL(or)
 #define at_get_and_set	AT_DECL(get_and_set)
 #define at_cmpxchg	AT_DECL(cmpxchg)
+#define at_add	AT_DECL(add)


 #define CHECK_ERR(txt, x, y) \
... ...
@@ -215,7 +216,7 @@ int main(int argc, char** argv)
 	printf(" atomic_and, v should be 2 ............. %2d\n", (int)at_get(v));

 	VERIFY(at_or(v, 5), 7);
-	printf(" atomic_or,  v should be 7 ............. %2d\n", (int)r);
+	printf(" atomic_or,  v should be 7 ............. %2d\n", (int)at_get(v));
 	VERIFY(r=at_get_and_set(v, 0), 0);
 	printf(" atomic_get_and_set, v should be 0 ..... %2d\n", (int)at_get(v));
 	VERIFY(r=at_cmpxchg(v, 0, 7), 7);
... ...
@@ -226,6 +227,14 @@ int main(int argc, char** argv)
 	CHECK_ERR(cmpxchg, r, 7);
 	printf(" atomic_cmpxchg (fail), v should be 7 .. %2d\n", (int)at_get(v));
 	printf("                        r should be 7 .. %2d\n", (int)r);
+	VERIFY(r=at_add(v, 2), 9);
+	CHECK_ERR(atomic_add, r, 9);
+	printf(" atomic_add, v should be 9 ............. %2d\n", (int)at_get(v));
+	printf("             r should be 9 ............. %2d\n", (int)r);
+	VERIFY(r=at_add(v, -10), -1);
+	CHECK_ERR(atomic_add, r, -1);
+	printf(" atomic_add, v should be -1 ............ %2d\n", (int)at_get(v));
+	printf("             r should be -1 ............ %2d\n", (int)r);


 	printf("\ndone.\n");
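If the new checks pass, the added block prints the following (derived from the
format strings above, assuming no CHECK_ERR failures):

 atomic_add, v should be 9 .............  9
             r should be 9 .............  9
 atomic_add, v should be -1 ............ -1
             r should be -1 ............ -1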