99ddb47a841fc5e95f5eebb8050bd1006963c950
[reactos.git] / reactos / include / crt / mingw32 / intrin_arm.h
1 /*
2 Compatibility <intrin.h> header for GCC -- GCC equivalents of intrinsic
3 Microsoft Visual C++ functions. Originally developed for the ReactOS
4 (<http://www.reactos.org/>) and TinyKrnl (<http://www.tinykrnl.org/>)
5 projects.
6
7 Copyright (c) 2006 KJK::Hyperion <hackbunny@reactos.com>
8
9 Permission is hereby granted, free of charge, to any person obtaining a
10 copy of this software and associated documentation files (the "Software"),
11 to deal in the Software without restriction, including without limitation
12 the rights to use, copy, modify, merge, publish, distribute, sublicense,
13 and/or sell copies of the Software, and to permit persons to whom the
14 Software is furnished to do so, subject to the following conditions:
15
16 The above copyright notice and this permission notice shall be included in
17 all copies or substantial portions of the Software.
18
19 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
24 FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
25 DEALINGS IN THE SOFTWARE.
26 */
27
28 #ifndef KJK_INTRIN_ARM_H_
29 #define KJK_INTRIN_ARM_H_
30
31 #ifndef __GNUC__
32 #error Unsupported compiler
33 #endif
34
/* Address the current function will return to (MSVC _ReturnAddress). */
#define _ReturnAddress() (__builtin_return_address(0))
/* MSVC's _ReadWriteBarrier is a compiler-only reordering barrier;
 * __sync_synchronize additionally emits a hardware memory barrier —
 * stronger than required, never weaker. */
#define _ReadWriteBarrier() __sync_synchronize()
37
/* Number of leading zero bits in the 32-bit value Mask.
 * Returns 32 for Mask == 0, since __builtin_clz alone is undefined
 * for a zero argument. */
__INTRIN_INLINE unsigned _CountLeadingZeros(long Mask)
{
    if (Mask == 0)
        return 32;

    return __builtin_clz(Mask);
}
42
/* Number of trailing zero bits in the 32-bit value Mask.
 * Returns 32 for Mask == 0, since __builtin_ctz alone is undefined
 * for a zero argument. */
__INTRIN_INLINE unsigned _CountTrailingZeros(long Mask)
{
    if (Mask == 0)
        return 32;

    return __builtin_ctz(Mask);
}
47
/* Find the index of the lowest set bit in Mask (MSVC _BitScanForward).
 * Returns 1 and stores the bit index in *Index when any bit is set;
 * returns 0 (leaving *Index untouched) for Mask == 0.
 * Fix: the original unconditionally evaluated __builtin_ctz(Mask),
 * which is undefined behavior when Mask == 0. */
__INTRIN_INLINE unsigned char _BitScanForward(unsigned long * const Index, const unsigned long Mask)
{
    if (Mask == 0)
        return 0;

    *Index = __builtin_ctz(Mask);
    return 1;
}
53
/* 8-bit atomic compare-and-swap: when *Destination equals Comperand,
 * store Exchange; in every case return the value *Destination held
 * before the operation. */
__INTRIN_INLINE char _InterlockedCompareExchange8(volatile char * const Destination, const char Exchange, const char Comperand)
{
    const char previous = __sync_val_compare_and_swap(Destination, Comperand, Exchange);
    return previous;
}
58
/* 16-bit atomic compare-and-swap: when *Destination equals Comperand,
 * store Exchange; in every case return the value *Destination held
 * before the operation. */
__INTRIN_INLINE short _InterlockedCompareExchange16(volatile short * const Destination, const short Exchange, const short Comperand)
{
    const short previous = __sync_val_compare_and_swap(Destination, Comperand, Exchange);
    return previous;
}
63
/* Atomically add Value to the 16-bit *Addend; returns the value *Addend
 * held before the addition (widened to long, matching this header's
 * original signature). */
__INTRIN_INLINE long _InterlockedExchangeAdd16(volatile short * const Addend, const short Value)
{
    const short previous = __sync_fetch_and_add(Addend, Value);
    return previous;
}
68
69 __INTRIN_INLINE long _InterlockedCompareExchange(volatile long * const dest, const long exch, const long comp)
70 {
71 long a, b;
72
73 __asm__ __volatile__ ( "0:\n\t"
74 "ldr %1, [%2]\n\t"
75 "cmp %1, %4\n\t"
76 "bne 1f\n\t"
77 "swp %0, %3, [%2]\n\t"
78 "cmp %0, %1\n\t"
79 "swpne %3, %0, [%2]\n\t"
80 "bne 0b\n\t"
81 "1:"
82 : "=&r" (a), "=&r" (b)
83 : "r" (dest), "r" (exch), "r" (comp)
84 : "cc", "memory");
85
86 return a;
87 }
88
/* MSVC _InterlockedCompareExchange64: if *dest == comp, store exch;
 * return the value *dest held beforehand.
 *
 * FIXME: NOT atomic — the read, compare (which even re-reads *dest) and
 * store are separate C statements, so a concurrent writer can slip in
 * between them.  Left as-is because the pre-ARMv6 targets this header
 * serves have no 64-bit atomic primitive to build a real CAS from. */
__INTRIN_INLINE long long _InterlockedCompareExchange64(volatile long long * const dest, const long long exch, const long long comp)
{
    long long result;
    result = *dest;
    if (*dest == comp) *dest = exch;
    return result;
}
99
/* Pointer-sized compare-and-exchange: forwards to the 32-bit
 * _InterlockedCompareExchange (pointers are 32 bits wide on this target). */
__INTRIN_INLINE void * _InterlockedCompareExchangePointer(void * volatile * const Destination, void * const Exchange, void * const Comperand)
{
    volatile long * const slot = (volatile long * const)Destination;
    const long previous = _InterlockedCompareExchange(slot, (const long)Exchange, (const long)Comperand);
    return (void *)previous;
}
104
105
/* Atomically add "add" to *dest; returns the value *dest held before the
 * addition (MSVC _InterlockedExchangeAdd).  Built on the pre-ARMv6 SWP
 * instruction: compute the sum, swap it in, and if the value swapped out
 * is not the one we loaded (another writer raced us), swap the intruding
 * value back and retry.
 * NOTE(review): the swap-back window can lose a concurrent update, so
 * this is only reliable on a uniprocessor — confirm for SMP targets. */
__INTRIN_INLINE long _InterlockedExchangeAdd(volatile long * const dest, const long add)
{
    long a, b, c;   /* a: loaded value, b: computed sum, c: value swapped out */

    __asm__ __volatile__ ( "0:\n\t"
                           "ldr %0, [%3]\n\t"       /* a = *dest */
                           "add %1, %0, %4\n\t"     /* b = a + add */
                           "swp %2, %1, [%3]\n\t"   /* c = atomic swap(*dest, b) */
                           "cmp %0, %2\n\t"         /* did another writer race us? */
                           "swpne %1, %2, [%3]\n\t" /* yes: put the intruder's value back */
                           "bne 0b"                 /*      and retry */
                           : "=&r" (a), "=&r" (b), "=&r" (c)
                           : "r" (dest), "r" (add)
                           : "cc", "memory");

    return a;
}
123
124 __INTRIN_INLINE long _InterlockedExchange(volatile long * const dest, const long exch)
125 {
126 long a;
127
128 __asm__ __volatile__ ( "swp %0, %2, [%1]"
129 : "=&r" (a)
130 : "r" (dest), "r" (exch));
131
132 return a;
133 }
134
135
/* Pointer-sized atomic exchange: forwards to the 32-bit
 * _InterlockedExchange (pointers are 32 bits wide on this target). */
__INTRIN_INLINE void * _InterlockedExchangePointer(void * volatile * const Target, void * const Value)
{
    volatile long * const slot = (volatile long * const)Target;
    const long previous = _InterlockedExchange(slot, (const long)Value);
    return (void *)previous;
}
140
141
142
/* Find the index of the highest set bit in Mask (MSVC _BitScanReverse).
 * Returns 1 and stores the bit index in *Index when any bit is set;
 * returns 0 (leaving *Index untouched) for Mask == 0.
 * Fix: the original unconditionally evaluated __builtin_clz(Mask),
 * which is undefined behavior when Mask == 0. */
__INTRIN_INLINE unsigned char _BitScanReverse(unsigned long * const Index, const unsigned long Mask)
{
    if (Mask == 0)
        return 0;

    *Index = 31 - __builtin_clz(Mask);
    return 1;
}
148
/* Atomically AND the 8-bit *value with mask via a compare-exchange retry
 * loop; returns the value *value held before the AND. */
__INTRIN_INLINE char _InterlockedAnd8(volatile char * const value, const char mask)
{
    char expected = *value;

    for (;;)
    {
        const char observed = _InterlockedCompareExchange8(value, expected & mask, expected);

        if (observed == expected)
            return observed;

        expected = observed;   /* lost a race — retry against the new value */
    }
}
165
/* Atomically AND the 16-bit *value with mask via a compare-exchange retry
 * loop; returns the value *value held before the AND. */
__INTRIN_INLINE short _InterlockedAnd16(volatile short * const value, const short mask)
{
    short expected = *value;

    for (;;)
    {
        const short observed = _InterlockedCompareExchange16(value, expected & mask, expected);

        if (observed == expected)
            return observed;

        expected = observed;   /* lost a race — retry against the new value */
    }
}
182
/* Atomically AND the 32-bit *value with mask via a compare-exchange retry
 * loop; returns the value *value held before the AND. */
__INTRIN_INLINE long _InterlockedAnd(volatile long * const value, const long mask)
{
    long expected = *value;

    for (;;)
    {
        const long observed = _InterlockedCompareExchange(value, expected & mask, expected);

        if (observed == expected)
            return observed;

        expected = observed;   /* lost a race — retry against the new value */
    }
}
199
/* Atomically OR the 8-bit *value with mask via a compare-exchange retry
 * loop; returns the value *value held before the OR. */
__INTRIN_INLINE char _InterlockedOr8(volatile char * const value, const char mask)
{
    char expected = *value;

    for (;;)
    {
        const char observed = _InterlockedCompareExchange8(value, expected | mask, expected);

        if (observed == expected)
            return observed;

        expected = observed;   /* lost a race — retry against the new value */
    }
}
216
/* Atomically OR the 16-bit *value with mask via a compare-exchange retry
 * loop; returns the value *value held before the OR. */
__INTRIN_INLINE short _InterlockedOr16(volatile short * const value, const short mask)
{
    short expected = *value;

    for (;;)
    {
        const short observed = _InterlockedCompareExchange16(value, expected | mask, expected);

        if (observed == expected)
            return observed;

        expected = observed;   /* lost a race — retry against the new value */
    }
}
233
/* Atomically OR the 32-bit *value with mask via a compare-exchange retry
 * loop; returns the value *value held before the OR. */
__INTRIN_INLINE long _InterlockedOr(volatile long * const value, const long mask)
{
    long expected = *value;

    for (;;)
    {
        const long observed = _InterlockedCompareExchange(value, expected | mask, expected);

        if (observed == expected)
            return observed;

        expected = observed;   /* lost a race — retry against the new value */
    }
}
250
/* Atomically XOR the 8-bit *value with mask via a compare-exchange retry
 * loop; returns the value *value held before the XOR. */
__INTRIN_INLINE char _InterlockedXor8(volatile char * const value, const char mask)
{
    char expected = *value;

    for (;;)
    {
        const char observed = _InterlockedCompareExchange8(value, expected ^ mask, expected);

        if (observed == expected)
            return observed;

        expected = observed;   /* lost a race — retry against the new value */
    }
}
267
/* Atomically XOR the 16-bit *value with mask via a compare-exchange retry
 * loop; returns the value *value held before the XOR. */
__INTRIN_INLINE short _InterlockedXor16(volatile short * const value, const short mask)
{
    short expected = *value;

    for (;;)
    {
        const short observed = _InterlockedCompareExchange16(value, expected ^ mask, expected);

        if (observed == expected)
            return observed;

        expected = observed;   /* lost a race — retry against the new value */
    }
}
284
/* Atomically XOR the 32-bit *value with mask via a compare-exchange retry
 * loop; returns the value *value held before the XOR. */
__INTRIN_INLINE long _InterlockedXor(volatile long * const value, const long mask)
{
    long expected = *value;

    for (;;)
    {
        const long observed = _InterlockedCompareExchange(value, expected ^ mask, expected);

        if (observed == expected)
            return observed;

        expected = observed;   /* lost a race — retry against the new value */
    }
}
301
/* Atomically subtract one from *lpAddend; returns the NEW (decremented)
 * value, per the MSVC _InterlockedDecrement contract. */
__INTRIN_INLINE long _InterlockedDecrement(volatile long * const lpAddend)
{
    const long original = _InterlockedExchangeAdd(lpAddend, -1);
    return original - 1;
}
306
/* Atomically add one to *lpAddend; returns the NEW (incremented) value,
 * per the MSVC _InterlockedIncrement contract. */
__INTRIN_INLINE long _InterlockedIncrement(volatile long * const lpAddend)
{
    const long original = _InterlockedExchangeAdd(lpAddend, 1);
    return original + 1;
}
311
/* Atomically subtract one from the 16-bit *lpAddend; returns the NEW
 * (decremented) value. */
__INTRIN_INLINE long _InterlockedDecrement16(volatile short * const lpAddend)
{
    const long original = _InterlockedExchangeAdd16(lpAddend, -1);
    return original - 1;
}
316
/* Atomically add one to the 16-bit *lpAddend; returns the NEW
 * (incremented) value. */
__INTRIN_INLINE long _InterlockedIncrement16(volatile short * const lpAddend)
{
    const long original = _InterlockedExchangeAdd16(lpAddend, 1);
    return original + 1;
}
321
/* MSVC _InterlockedAddLargeStatistic: add a 32-bit Value to a 64-bit
 * counter; returns Value, not the resulting sum.
 * NOTE(review): the 64-bit "+=" here is a plain, non-atomic
 * read-modify-write, so concurrent callers can lose updates — presumably
 * tolerated because this is a statistics counter; confirm before relying
 * on it for anything stronger. */
__INTRIN_INLINE long _InterlockedAddLargeStatistic(volatile long long * const Addend, const long Value)
{
    *Addend += Value;
    return Value;
}
327
/* Disable IRQ interrupts on the current core via CPSID i — the ARM
 * counterpart of x86 "cli" (hence the "@ __cli" asm comment).
 * NOTE(review): CPSID requires a privileged processor mode; it is a NOP
 * from user mode — confirm callers run privileged. */
__INTRIN_INLINE void _disable(void)
{
    __asm__ __volatile__
    (
     "cpsid i @ __cli" : : : "memory", "cc"
    );
}
335
/* Re-enable IRQ interrupts on the current core via CPSIE i — the ARM
 * counterpart of x86 "sti" (hence the "@ __sti" asm comment).
 * NOTE(review): CPSIE requires a privileged processor mode; it is a NOP
 * from user mode — confirm callers run privileged. */
__INTRIN_INLINE void _enable(void)
{
    __asm__ __volatile__
    (
     "cpsie i @ __sti" : : : "memory", "cc"
    );
}
343
/* Atomically set bit b (0..31) of *a; returns that bit's previous value
 * (0 or 1).
 * Fix: build the single-bit mask in unsigned arithmetic — the original's
 * "1 << b" left-shifts into the sign bit for b == 31, which is undefined
 * behavior for a signed int. */
__INTRIN_INLINE unsigned char _interlockedbittestandset(volatile long * a, const long b)
{
    const long bit = (long)(1UL << b);
    return (_InterlockedOr(a, bit) >> b) & 1;
}
348
/* Atomically clear bit b (0..31) of *a; returns that bit's previous value
 * (0 or 1).
 * Fix: build the single-bit mask in unsigned arithmetic — the original's
 * "1 << b" left-shifts into the sign bit for b == 31, which is undefined
 * behavior for a signed int. */
__INTRIN_INLINE unsigned char _interlockedbittestandreset(volatile long * a, const long b)
{
    const long bit = (long)(1UL << b);
    return (_InterlockedAnd(a, ~bit) >> b) & 1;
}
353
354 #ifndef __MSVCRT__
/* Rotate the 32-bit value left by shift bits (MSVC _rotl).
 * Fix: both shift counts are masked to 0..31 — the original computed
 * "value >> (32 - shift)", a shift by 32 when shift == 0, which is
 * undefined behavior.  With the masks, shift == 0 (or any multiple of 32)
 * cleanly returns the value unchanged. */
__INTRIN_INLINE unsigned int _rotl(const unsigned int value, int shift)
{
    const unsigned int s = (unsigned int)shift & 31;
    return (value << s) | (value >> ((32 - s) & 31));
}
359 #endif
360
/* Count leading zeros using the ARM CLZ instruction (ARMv5T and later).
 * GCC statement-expression macro; evaluates its argument exactly once.
 * NOTE(review): relies on the ULONG typedef being in scope — presumably
 * the SDK headers are included before this one; confirm at call sites. */
#define _clz(a) \
({ ULONG __value, __arg = (a); \
asm ("clz\t%0, %1": "=r" (__value): "r" (__arg)); \
__value; })
365
366 #endif
367 /* EOF */