Commit | Line | Data |
---|---|---|
58de5a4b MD |
1 | #ifndef _URCU_ARCH_UATOMIC_SPARC64_H |
2 | #define _URCU_ARCH_UATOMIC_SPARC64_H | |
3 | ||
4 | /* | |
5 | * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved. | |
6 | * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved. | |
7 | * Copyright (c) 1999-2003 by Hewlett-Packard Company. All rights reserved. | |
8 | * Copyright (c) 2009 Mathieu Desnoyers | |
9 | * | |
10 | * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED | |
11 | * OR IMPLIED. ANY USE IS AT YOUR OWN RISK. | |
12 | * | |
13 | * Permission is hereby granted to use or copy this program | |
14 | * for any purpose, provided the above notices are retained on all copies. | |
15 | * Permission to modify the code and to distribute modified code is granted, | |
16 | * provided the above notices are retained, and a notice that the code was | |
17 | * modified is included with the above copyright notice. | |
18 | * | |
19 | * Code inspired from libuatomic_ops-1.2, inherited in part from the | |
20 | * Boehm-Demers-Weiser conservative garbage collector. | |
21 | */ | |
22 | ||
23 | #include <urcu/compiler.h> | |
24 | #include <urcu/system.h> | |
25 | ||
36bc70a8 MD |
#ifdef __cplusplus
extern "C" {
#endif

/*
 * Fallback for compilers that do not predefine __SIZEOF_LONG__:
 * derive the size of long from the data model (__LP64__ => 8 bytes).
 */
#ifndef __SIZEOF_LONG__
#ifdef __LP64__
#define __SIZEOF_LONG__ 8
#else
#define __SIZEOF_LONG__ 4
#endif
#endif

#ifndef BITS_PER_LONG
#define BITS_PER_LONG	(__SIZEOF_LONG__ * 8)
#endif

/*
 * Single store/load of *addr via STORE_SHARED/LOAD_SHARED (from
 * urcu/system.h — presumably volatile-access wrappers; no memory
 * barrier is implied here, confirm against urcu/system.h).
 */
#define uatomic_set(addr, v)	STORE_SHARED(*(addr), (v))
#define uatomic_read(addr)	LOAD_SHARED(*(addr))
/* cmpxchg */

/*
 * Atomic compare-and-swap, dispatched on operand size.
 *
 * Uses the SPARC V9 cas/casx instructions: if *addr == old, *addr is
 * replaced by _new; in all cases the register holding _new receives the
 * value that was in memory ("+&r" makes it an early-clobber in/out
 * operand).  The membar before the cas orders prior stores/loads against
 * the cas's load; the membar after orders the cas's store against later
 * loads/stores.
 *
 * addr: address of the 4- or 8-byte object.
 * old:  expected current value.
 * _new: value to install if *addr == old.
 * len:  operand size in bytes (4, or 8 on 64-bit builds).
 *
 * Returns the previous value of *addr.  Any other len traps at runtime
 * via __builtin_trap(); the unreachable "return 0" only silences the
 * compiler's missing-return warning.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_cmpxchg(void *addr, unsigned long old,
			       unsigned long _new, int len)
{
	switch (len) {
	case 4:
	{
		__asm__ __volatile__ (
			"membar #StoreLoad | #LoadLoad\n\t"
			"cas [%1],%2,%0\n\t"
			"membar #StoreLoad | #StoreStore\n\t"
			: "+&r" (_new)
			: "r" (addr), "r" (old)
			: "memory");

		return _new;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		__asm__ __volatile__ (
			"membar #StoreLoad | #LoadLoad\n\t"
			"casx [%1],%2,%0\n\t"
			"membar #StoreLoad | #StoreStore\n\t"
			: "+&r" (_new)
			: "r" (addr), "r" (old)
			: "memory");

		return _new;
	}
#endif
	}
	__builtin_trap();
	return 0;
}


/*
 * Type-generic front end: picks the operand size from sizeof(*(addr))
 * and casts the result back to the pointed-to type.
 */
#define uatomic_cmpxchg(addr, old, _new)				    \
	((__typeof__(*(addr))) _uatomic_cmpxchg((addr), (unsigned long)(old),\
						(unsigned long)(_new),	    \
						sizeof(*(addr))))
/* xchg */

/*
 * Atomically replace *addr with val and return the previous value.
 * Built as a compare-and-swap retry loop on top of _uatomic_cmpxchg,
 * dispatched on operand size (4 bytes always; 8 bytes on 64-bit builds).
 * Unsupported sizes trap at runtime.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_exchange(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int expected, observed;

		observed = uatomic_read((unsigned int *)addr);
		for (;;) {
			expected = observed;
			observed = _uatomic_cmpxchg(addr, expected, val, 4);
			if (observed == expected)
				return expected;	/* swap succeeded */
		}
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long expected, observed;

		observed = uatomic_read((unsigned long *)addr);
		for (;;) {
			expected = observed;
			observed = _uatomic_cmpxchg(addr, expected, val, 8);
			if (observed == expected)
				return expected;	/* swap succeeded */
		}
	}
#endif
	}
	/* Unsupported operand size. */
	__builtin_trap();
	return 0;
}
125 | ||
/*
 * Type-generic exchange: atomically store v into *addr and return the
 * previous value, cast back to the pointed-to type.
 */
#define uatomic_xchg(addr, v)						    \
	((__typeof__(*(addr))) _uatomic_exchange((addr), (unsigned long)(v), \
						sizeof(*(addr))))
129 | ||
/* uatomic_add_return */

/*
 * Atomically add val to *addr and return the resulting (new) value.
 * Implemented as a compare-and-swap retry loop over _uatomic_cmpxchg,
 * dispatched on operand size (4 bytes always; 8 bytes on 64-bit builds).
 * Unsupported sizes trap at runtime.
 */
static inline __attribute__((always_inline))
unsigned long _uatomic_add_return(void *addr, unsigned long val, int len)
{
	switch (len) {
	case 4:
	{
		unsigned int cur, prev;

		cur = uatomic_read((unsigned int *)addr);
		while (1) {
			prev = cur;
			cur = _uatomic_cmpxchg(addr, prev, prev + val, 4);
			if (cur == prev)
				break;	/* CAS installed prev + val */
		}

		return prev + val;
	}
#if (BITS_PER_LONG == 64)
	case 8:
	{
		unsigned long cur, prev;

		cur = uatomic_read((unsigned long *)addr);
		while (1) {
			prev = cur;
			cur = _uatomic_cmpxchg(addr, prev, prev + val, 8);
			if (cur == prev)
				break;	/* CAS installed prev + val */
		}

		return prev + val;
	}
#endif
	}
	/* Unsupported operand size. */
	__builtin_trap();
	return 0;
}
166 | ||
/*
 * Type-generic add-and-return: atomically add v to *addr, yielding the
 * new value cast back to the pointed-to type.
 */
#define uatomic_add_return(addr, v)					\
	((__typeof__(*(addr))) _uatomic_add_return((addr),		\
						  (unsigned long)(v),	\
						  sizeof(*(addr))))

/* uatomic_sub_return, uatomic_add, uatomic_sub, uatomic_inc, uatomic_dec */

/* Subtraction is addition of the negated operand. */
#define uatomic_sub_return(addr, v)	uatomic_add_return((addr), -(v))

/* Void-returning variants: same atomic update, result discarded. */
#define uatomic_add(addr, v)		(void)uatomic_add_return((addr), (v))
#define uatomic_sub(addr, v)		(void)uatomic_sub_return((addr), (v))

#define uatomic_inc(addr)		uatomic_add((addr), 1)
#define uatomic_dec(addr)		uatomic_add((addr), -1)
181 | ||
58de5a4b MD |
/*
 * Compat layer entry point: on this architecture it maps directly to
 * the native cmpxchg implementation.
 */
#define compat_uatomic_cmpxchg(ptr, old, _new)	uatomic_cmpxchg(ptr, old, _new)

#ifdef __cplusplus
}
#endif

/*
 * NOTE(review): the original closing comment said _URCU_ARCH_UATOMIC_PPC_H
 * (copy-paste from the PPC header); corrected to match the guard opened
 * at the top of this file.
 */
#endif /* _URCU_ARCH_UATOMIC_SPARC64_H */