2 Compatibility <intrin.h> header for GCC -- GCC equivalents of intrinsic
3 Microsoft Visual C++ functions. Originally developed for the ReactOS
4 (<http://www.reactos.org/>) and TinyKrnl (<http://www.tinykrnl.org/>)
7 Copyright (c) 2006 KJK::Hyperion <hackbunny@reactos.com>
9 Permission is hereby granted, free of charge, to any person obtaining a
10 copy of this software and associated documentation files (the "Software"),
11 to deal in the Software without restriction, including without limitation
12 the rights to use, copy, modify, merge, publish, distribute, sublicense,
13 and/or sell copies of the Software, and to permit persons to whom the
14 Software is furnished to do so, subject to the following conditions:
16 The above copyright notice and this permission notice shall be included in
17 all copies or substantial portions of the Software.
19 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
24 FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
25 DEALINGS IN THE SOFTWARE.
28 #ifndef KJK_INTRIN_ARM_H_
29 #define KJK_INTRIN_ARM_H_
32 #error Unsupported compiler
35 #define _ReturnAddress() (__builtin_return_address(0))
36 #define _ReadWriteBarrier() __sync_synchronize()
38 __INTRIN_INLINE
void __yield(void) { __asm__
__volatile__("yield"); }
40 __INTRIN_INLINE
void __break(unsigned int value
) { __asm__
__volatile__("bkpt %0": : "M" (value
)); }
42 __INTRIN_INLINE
unsigned short _byteswap_ushort(unsigned short value
)
44 return (value
>> 8) || (value
<< 8);
47 __INTRIN_INLINE
unsigned _CountLeadingZeros(long Mask
)
49 return Mask
? __builtin_clz(Mask
) : 32;
52 __INTRIN_INLINE
unsigned _CountTrailingZeros(long Mask
)
54 return Mask
? __builtin_ctz(Mask
) : 32;
57 __INTRIN_INLINE
unsigned char _BitScanForward(unsigned long * const Index
, const unsigned long Mask
)
59 *Index
= __builtin_ctz(Mask
);
63 __INTRIN_INLINE
char _InterlockedCompareExchange8(volatile char * const Destination
, const char Exchange
, const char Comperand
)
65 return __sync_val_compare_and_swap(Destination
, Comperand
, Exchange
);
68 __INTRIN_INLINE
short _InterlockedCompareExchange16(volatile short * const Destination
, const short Exchange
, const short Comperand
)
72 __asm__
__volatile__ ( "0:\n\t"
76 "swp %0, %3, [%2]\n\t"
78 "swpne %3, %0, [%2]\n\t"
81 : "=&r" (a
), "=&r" (b
)
82 : "r" (Destination
), "r" (Exchange
), "r" (Comperand
)
88 __INTRIN_INLINE
short _InterlockedExchangeAdd16(volatile short * const Addend
, const short Value
)
92 __asm__
__volatile__ ( "0:\n\t"
95 "swp %2, %1, [%3]\n\t"
97 "swpne %1, %2, [%3]\n\t"
99 : "=&r" (a
), "=&r" (b
), "=&r" (c
)
100 : "r" (Value
), "r" (Addend
)
106 __INTRIN_INLINE
long _InterlockedCompareExchange(volatile long * const dest
, const long exch
, const long comp
)
110 __asm__
__volatile__ ( "0:\n\t"
114 "swp %0, %3, [%2]\n\t"
116 "swpne %3, %0, [%2]\n\t"
119 : "=&r" (a
), "=&r" (b
)
120 : "r" (dest
), "r" (exch
), "r" (comp
)
126 __INTRIN_INLINE
long long _InterlockedCompareExchange64(volatile long long * const dest
, const long long exch
, const long long comp
)
133 if (*dest
== comp
) *dest
= exch
;
137 __INTRIN_INLINE
void * _InterlockedCompareExchangePointer(void * volatile * const Destination
, void * const Exchange
, void * const Comperand
)
139 return (void*)_InterlockedCompareExchange((volatile long* const)Destination
, (const long)Exchange
, (const long)Comperand
);
143 __INTRIN_INLINE
long _InterlockedExchangeAdd(volatile long * const dest
, const long add
)
147 __asm__
__volatile__ ( "0:\n\t"
150 "swp %2, %1, [%3]\n\t"
152 "swpne %1, %2, [%3]\n\t"
154 : "=&r" (a
), "=&r" (b
), "=&r" (c
)
155 : "r" (dest
), "r" (add
)
161 __INTRIN_INLINE
long _InterlockedExchange(volatile long * const dest
, const long exch
)
165 __asm__
__volatile__ ( "swp %0, %2, [%1]"
167 : "r" (dest
), "r" (exch
));
173 __INTRIN_INLINE
void * _InterlockedExchangePointer(void * volatile * const Target
, void * const Value
)
175 return (void *)_InterlockedExchange((volatile long * const)Target
, (const long)Value
);
180 __INTRIN_INLINE
unsigned char _BitScanReverse(unsigned long * const Index
, const unsigned long Mask
)
182 *Index
= 31 - __builtin_clz(Mask
);
186 __INTRIN_INLINE
char _InterlockedAnd8(volatile char * const value
, const char mask
)
196 y
= _InterlockedCompareExchange8(value
, x
& mask
, x
);
203 __INTRIN_INLINE
short _InterlockedAnd16(volatile short * const value
, const short mask
)
213 y
= _InterlockedCompareExchange16(value
, x
& mask
, x
);
220 __INTRIN_INLINE
long _InterlockedAnd(volatile long * const value
, const long mask
)
230 y
= _InterlockedCompareExchange(value
, x
& mask
, x
);
237 __INTRIN_INLINE
char _InterlockedOr8(volatile char * const value
, const char mask
)
247 y
= _InterlockedCompareExchange8(value
, x
| mask
, x
);
254 __INTRIN_INLINE
short _InterlockedOr16(volatile short * const value
, const short mask
)
264 y
= _InterlockedCompareExchange16(value
, x
| mask
, x
);
271 __INTRIN_INLINE
long _InterlockedOr(volatile long * const value
, const long mask
)
281 y
= _InterlockedCompareExchange(value
, x
| mask
, x
);
288 __INTRIN_INLINE
char _InterlockedXor8(volatile char * const value
, const char mask
)
298 y
= _InterlockedCompareExchange8(value
, x
^ mask
, x
);
305 __INTRIN_INLINE
short _InterlockedXor16(volatile short * const value
, const short mask
)
315 y
= _InterlockedCompareExchange16(value
, x
^ mask
, x
);
322 __INTRIN_INLINE
long _InterlockedXor(volatile long * const value
, const long mask
)
332 y
= _InterlockedCompareExchange(value
, x
^ mask
, x
);
339 __INTRIN_INLINE
long _InterlockedDecrement(volatile long * const lpAddend
)
341 return _InterlockedExchangeAdd(lpAddend
, -1) - 1;
344 __INTRIN_INLINE
long _InterlockedIncrement(volatile long * const lpAddend
)
346 return _InterlockedExchangeAdd(lpAddend
, 1) + 1;
349 __INTRIN_INLINE
long _InterlockedDecrement16(volatile short * const lpAddend
)
351 return _InterlockedExchangeAdd16(lpAddend
, -1) - 1;
354 __INTRIN_INLINE
long _InterlockedIncrement16(volatile short * const lpAddend
)
356 return _InterlockedExchangeAdd16(lpAddend
, 1) + 1;
359 __INTRIN_INLINE
long _InterlockedAddLargeStatistic(volatile long long * const Addend
, const long Value
)
365 __INTRIN_INLINE
void _disable(void)
369 "cpsid i @ __cli" : : : "memory", "cc"
373 __INTRIN_INLINE
void _enable(void)
377 "cpsie i @ __sti" : : : "memory", "cc"
381 __INTRIN_INLINE
unsigned char _interlockedbittestandset(volatile long * a
, const long b
)
383 return (_InterlockedOr(a
, 1 << b
) >> b
) & 1;
386 __INTRIN_INLINE
unsigned char _interlockedbittestandreset(volatile long * a
, const long b
)
388 return (_InterlockedAnd(a
, ~(1 << b
)) >> b
) & 1;
392 __INTRIN_INLINE
unsigned int _rotl(const unsigned int value
, int shift
)
394 return (((value
) << ((int)(shift
))) | ((value
) >> (32 - (int)(shift
))));
399 ({ ULONG __value, __arg = (a); \
400 asm ("clz\t%0, %1": "=r" (__value): "r" (__arg)); \