/*
	Compatibility <intrin_x86.h> header for GCC -- GCC equivalents of intrinsic
	Microsoft Visual C++ functions. Originally developed for the ReactOS
	(<http://www.reactos.org/>) and TinyKrnl (<http://www.tinykrnl.org/>)
	projects.

	Copyright (c) 2006 KJK::Hyperion <hackbunny@reactos.com>

	Permission is hereby granted, free of charge, to any person obtaining a
	copy of this software and associated documentation files (the "Software"),
	to deal in the Software without restriction, including without limitation
	the rights to use, copy, modify, merge, publish, distribute, sublicense,
	and/or sell copies of the Software, and to permit persons to whom the
	Software is furnished to do so, subject to the following conditions:

	The above copyright notice and this permission notice shall be included in
	all copies or substantial portions of the Software.

	THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
	IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
	FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
	AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
	LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
	FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
	DEALINGS IN THE SOFTWARE.
*/
#ifndef KJK_INTRIN_X86_H_
#define KJK_INTRIN_X86_H_
/*
	FIXME: review all "memory" clobbers, add/remove to match Visual C++
	behavior: some "obvious" memory barriers are not present in the Visual C++
	implementation - e.g. __stosX; on the other hand, some memory barriers that
	*are* present could have been missed
*/

/*
	NOTE: this is a *compatibility* header. Some functions may look wrong at
	first, but they're only "as wrong" as they would be on Visual C++. Our
	priority is compatibility

	NOTE: unlike most people who write inline asm for GCC, I didn't pull the
	constraints and the uses of __volatile__ out of my... hat. Do not touch
	them. I hate cargo cult programming

	NOTE: be very careful with declaring "memory" clobbers. Some "obvious"
	barriers aren't there in Visual C++ (e.g. __stosX)

	NOTE: review all intrinsics with a return value, add/remove __volatile__
	where necessary. If an intrinsic whose value is ignored generates a no-op
	under Visual C++, __volatile__ must be omitted; if it always generates code
	(for example, if it has side effects), __volatile__ must be specified. GCC
	will only optimize out non-volatile asm blocks with outputs, so input-only
	blocks are safe. Oddities such as the non-volatile 'rdmsr' are intentional
	and follow Visual C++ behavior

	NOTE: on GCC versions newer than 4.1.0, please use the __sync_* built-ins
	for barriers and atomic operations. Test the version like this:

	#if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) > 40100
	...

	Pay attention to the type of barrier. Make it match what Visual C++ would
	use in the same case
*/

#ifdef __cplusplus
extern "C" {
#endif
/*** Stack frame juggling ***/
#define _ReturnAddress() (__builtin_return_address(0))
#define _AddressOfReturnAddress() (&(((void **)(__builtin_frame_address(0)))[1]))
/* TODO: __getcallerseflags but how??? */

/* Maybe the same for x86? */
#ifdef _x86_64
#define _alloca(s) __builtin_alloca(s)
#endif
/*** Memory barriers ***/

__INTRIN_INLINE void _ReadWriteBarrier(void)
{
	__asm__ __volatile__("" : : : "memory");
}

/* GCC only supports full barriers */
#define _ReadBarrier _ReadWriteBarrier
#define _WriteBarrier _ReadWriteBarrier
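
/*
	Illustrative usage sketch (not part of the original header): the macros
	above are compiler-only barriers, while the _mm_*fence intrinsics below
	also order the CPU. A hypothetical producer might combine them like this:
*/
#if 0
static int payload;
static volatile int ready;

static void publish(int value)
{
	payload = value;
	_mm_sfence();   /* order the store to payload before the flag store */
	ready = 1;
}
#endif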
__INTRIN_INLINE void _mm_mfence(void)
{
	__asm__ __volatile__("mfence" : : : "memory");
}

__INTRIN_INLINE void _mm_lfence(void)
{
	_ReadBarrier();
	__asm__ __volatile__("lfence");
	_ReadBarrier();
}

__INTRIN_INLINE void _mm_sfence(void)
{
	_WriteBarrier();
	__asm__ __volatile__("sfence");
	_WriteBarrier();
}

#ifdef _x86_64
__INTRIN_INLINE void __faststorefence(void)
{
	long local;
	__asm__ __volatile__("lock; orl $0, %0;" : : "m"(local));
}
#endif
/*** Atomic operations ***/

#if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) > 40100

__INTRIN_INLINE char _InterlockedCompareExchange8(volatile char * const Destination, const char Exchange, const char Comperand)
{
	return __sync_val_compare_and_swap(Destination, Comperand, Exchange);
}

__INTRIN_INLINE short _InterlockedCompareExchange16(volatile short * const Destination, const short Exchange, const short Comperand)
{
	return __sync_val_compare_and_swap(Destination, Comperand, Exchange);
}

__INTRIN_INLINE long _InterlockedCompareExchange(volatile long * const Destination, const long Exchange, const long Comperand)
{
	return __sync_val_compare_and_swap(Destination, Comperand, Exchange);
}

__INTRIN_INLINE void * _InterlockedCompareExchangePointer(void * volatile * const Destination, void * const Exchange, void * const Comperand)
{
	return (void *)__sync_val_compare_and_swap(Destination, Comperand, Exchange);
}
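
/*
	Illustrative usage sketch (not part of the original header): the classic
	compare-and-swap retry loop built on _InterlockedCompareExchange. The
	helper name atomic_fetch_max is hypothetical.
*/
#if 0
static long atomic_fetch_max(volatile long * target, long value)
{
	long old = *target;

	for(;;)
	{
		long desired = old > value ? old : value;
		long seen = _InterlockedCompareExchange(target, desired, old);

		if(seen == old)
			return old; /* swap succeeded */

		old = seen; /* somebody changed it - retry with the value we saw */
	}
}
#endif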
__INTRIN_INLINE long _InterlockedExchange(volatile long * const Target, const long Value)
{
	/* NOTE: __sync_lock_test_and_set would be an acquire barrier, so we force a full barrier */
	__sync_synchronize();
	return __sync_lock_test_and_set(Target, Value);
}

#if defined(_M_AMD64)
__INTRIN_INLINE long long _InterlockedExchange64(volatile long long * const Target, const long long Value)
{
	/* NOTE: __sync_lock_test_and_set would be an acquire barrier, so we force a full barrier */
	__sync_synchronize();
	return __sync_lock_test_and_set(Target, Value);
}
#endif

__INTRIN_INLINE void * _InterlockedExchangePointer(void * volatile * const Target, void * const Value)
{
	/* NOTE: ditto */
	__sync_synchronize();
	return (void *)__sync_lock_test_and_set(Target, Value);
}
__INTRIN_INLINE long _InterlockedExchangeAdd16(volatile short * const Addend, const short Value)
{
	return __sync_fetch_and_add(Addend, Value);
}

__INTRIN_INLINE long _InterlockedExchangeAdd(volatile long * const Addend, const long Value)
{
	return __sync_fetch_and_add(Addend, Value);
}

#if defined(_M_AMD64)
__INTRIN_INLINE long long _InterlockedExchangeAdd64(volatile long long * const Addend, const long long Value)
{
	return __sync_fetch_and_add(Addend, Value);
}
#endif
__INTRIN_INLINE char _InterlockedAnd8(volatile char * const value, const char mask)
{
	return __sync_fetch_and_and(value, mask);
}

__INTRIN_INLINE short _InterlockedAnd16(volatile short * const value, const short mask)
{
	return __sync_fetch_and_and(value, mask);
}

__INTRIN_INLINE long _InterlockedAnd(volatile long * const value, const long mask)
{
	return __sync_fetch_and_and(value, mask);
}

#if defined(_M_AMD64)
__INTRIN_INLINE long long _InterlockedAnd64(volatile long long * const value, const long long mask)
{
	return __sync_fetch_and_and(value, mask);
}
#endif
__INTRIN_INLINE char _InterlockedOr8(volatile char * const value, const char mask)
{
	return __sync_fetch_and_or(value, mask);
}

__INTRIN_INLINE short _InterlockedOr16(volatile short * const value, const short mask)
{
	return __sync_fetch_and_or(value, mask);
}

__INTRIN_INLINE long _InterlockedOr(volatile long * const value, const long mask)
{
	return __sync_fetch_and_or(value, mask);
}

#if defined(_M_AMD64)
__INTRIN_INLINE long long _InterlockedOr64(volatile long long * const value, const long long mask)
{
	return __sync_fetch_and_or(value, mask);
}
#endif
__INTRIN_INLINE char _InterlockedXor8(volatile char * const value, const char mask)
{
	return __sync_fetch_and_xor(value, mask);
}

__INTRIN_INLINE short _InterlockedXor16(volatile short * const value, const short mask)
{
	return __sync_fetch_and_xor(value, mask);
}

__INTRIN_INLINE long _InterlockedXor(volatile long * const value, const long mask)
{
	return __sync_fetch_and_xor(value, mask);
}

#else
__INTRIN_INLINE char _InterlockedCompareExchange8(volatile char * const Destination, const char Exchange, const char Comperand)
{
	char retval = Comperand;
	__asm__("lock; cmpxchgb %b[Exchange], %[Destination]" : [retval] "+a" (retval) : [Destination] "m" (*Destination), [Exchange] "q" (Exchange) : "memory");
	return retval;
}

__INTRIN_INLINE short _InterlockedCompareExchange16(volatile short * const Destination, const short Exchange, const short Comperand)
{
	short retval = Comperand;
	__asm__("lock; cmpxchgw %w[Exchange], %[Destination]" : [retval] "+a" (retval) : [Destination] "m" (*Destination), [Exchange] "q" (Exchange) : "memory");
	return retval;
}

__INTRIN_INLINE long _InterlockedCompareExchange(volatile long * const Destination, const long Exchange, const long Comperand)
{
	long retval = Comperand;
	__asm__("lock; cmpxchgl %k[Exchange], %[Destination]" : [retval] "+a" (retval) : [Destination] "m" (*Destination), [Exchange] "q" (Exchange) : "memory");
	return retval;
}

__INTRIN_INLINE void * _InterlockedCompareExchangePointer(void * volatile * const Destination, void * const Exchange, void * const Comperand)
{
	void * retval = (void *)Comperand;
	__asm__("lock; cmpxchgl %k[Exchange], %[Destination]" : [retval] "=a" (retval) : "[retval]" (retval), [Destination] "m" (*Destination), [Exchange] "q" (Exchange) : "memory");
	return retval;
}
__INTRIN_INLINE long _InterlockedExchange(volatile long * const Target, const long Value)
{
	long retval = Value;
	__asm__("xchgl %[retval], %[Target]" : [retval] "+r" (retval) : [Target] "m" (*Target) : "memory");
	return retval;
}

__INTRIN_INLINE void * _InterlockedExchangePointer(void * volatile * const Target, void * const Value)
{
	void * retval = Value;
	__asm__("xchgl %[retval], %[Target]" : [retval] "+r" (retval) : [Target] "m" (*Target) : "memory");
	return retval;
}
__INTRIN_INLINE long _InterlockedExchangeAdd16(volatile short * const Addend, const short Value)
{
	short retval = Value;
	__asm__("lock; xaddw %[retval], %[Addend]" : [retval] "+r" (retval) : [Addend] "m" (*Addend) : "memory");
	return retval;
}

__INTRIN_INLINE long _InterlockedExchangeAdd(volatile long * const Addend, const long Value)
{
	long retval = Value;
	__asm__("lock; xaddl %[retval], %[Addend]" : [retval] "+r" (retval) : [Addend] "m" (*Addend) : "memory");
	return retval;
}
__INTRIN_INLINE char _InterlockedAnd8(volatile char * const value, const char mask)
{
	char x;
	char y;

	y = *value;

	do
	{
		x = y;
		y = _InterlockedCompareExchange8(value, x & mask, x);
	}
	while(y != x);

	return y;
}

__INTRIN_INLINE short _InterlockedAnd16(volatile short * const value, const short mask)
{
	short x;
	short y;

	y = *value;

	do
	{
		x = y;
		y = _InterlockedCompareExchange16(value, x & mask, x);
	}
	while(y != x);

	return y;
}

__INTRIN_INLINE long _InterlockedAnd(volatile long * const value, const long mask)
{
	long x;
	long y;

	y = *value;

	do
	{
		x = y;
		y = _InterlockedCompareExchange(value, x & mask, x);
	}
	while(y != x);

	return y;
}

__INTRIN_INLINE char _InterlockedOr8(volatile char * const value, const char mask)
{
	char x;
	char y;

	y = *value;

	do
	{
		x = y;
		y = _InterlockedCompareExchange8(value, x | mask, x);
	}
	while(y != x);

	return y;
}

__INTRIN_INLINE short _InterlockedOr16(volatile short * const value, const short mask)
{
	short x;
	short y;

	y = *value;

	do
	{
		x = y;
		y = _InterlockedCompareExchange16(value, x | mask, x);
	}
	while(y != x);

	return y;
}

__INTRIN_INLINE long _InterlockedOr(volatile long * const value, const long mask)
{
	long x;
	long y;

	y = *value;

	do
	{
		x = y;
		y = _InterlockedCompareExchange(value, x | mask, x);
	}
	while(y != x);

	return y;
}

__INTRIN_INLINE char _InterlockedXor8(volatile char * const value, const char mask)
{
	char x;
	char y;

	y = *value;

	do
	{
		x = y;
		y = _InterlockedCompareExchange8(value, x ^ mask, x);
	}
	while(y != x);

	return y;
}

__INTRIN_INLINE short _InterlockedXor16(volatile short * const value, const short mask)
{
	short x;
	short y;

	y = *value;

	do
	{
		x = y;
		y = _InterlockedCompareExchange16(value, x ^ mask, x);
	}
	while(y != x);

	return y;
}

__INTRIN_INLINE long _InterlockedXor(volatile long * const value, const long mask)
{
	long x;
	long y;

	y = *value;

	do
	{
		x = y;
		y = _InterlockedCompareExchange(value, x ^ mask, x);
	}
	while(y != x);

	return y;
}

#endif
#if (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) > 40100 && defined(__x86_64__)

__INTRIN_INLINE long long _InterlockedCompareExchange64(volatile long long * const Destination, const long long Exchange, const long long Comperand)
{
	return __sync_val_compare_and_swap(Destination, Comperand, Exchange);
}

#else

__INTRIN_INLINE long long _InterlockedCompareExchange64(volatile long long * const Destination, const long long Exchange, const long long Comperand)
{
	long long retval = Comperand;

	__asm__
	(
		"lock; cmpxchg8b %[Destination]" :
		[retval] "+A" (retval) :
		[Destination] "m" (*Destination),
		"b" ((unsigned long)((Exchange >> 0) & 0xFFFFFFFF)),
		"c" ((unsigned long)((Exchange >> 32) & 0xFFFFFFFF)) :
		"memory"
	);

	return retval;
}

#endif
__INTRIN_INLINE long _InterlockedAddLargeStatistic(volatile long long * const Addend, const long Value)
{
	__asm__
	(
		"lock; add %[Value], %[Lo32];"
		"jae LABEL%=;"
		"lock; adc $0, %[Hi32];"
		"LABEL%=:;" :
		[Lo32] "+m" (*((volatile long *)(Addend) + 0)), [Hi32] "+m" (*((volatile long *)(Addend) + 1)) :
		[Value] "ir" (Value) :
		"memory"
	);

	return Value;
}
__INTRIN_INLINE long _InterlockedDecrement(volatile long * const lpAddend)
{
	return _InterlockedExchangeAdd(lpAddend, -1) - 1;
}

__INTRIN_INLINE long _InterlockedIncrement(volatile long * const lpAddend)
{
	return _InterlockedExchangeAdd(lpAddend, 1) + 1;
}

__INTRIN_INLINE short _InterlockedDecrement16(volatile short * const lpAddend)
{
	return _InterlockedExchangeAdd16(lpAddend, -1) - 1;
}

__INTRIN_INLINE short _InterlockedIncrement16(volatile short * const lpAddend)
{
	return _InterlockedExchangeAdd16(lpAddend, 1) + 1;
}
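
/*
	Illustrative usage sketch (not part of the original header): a hypothetical
	reference-count release built on _InterlockedDecrement, relying on the
	intrinsic returning the *new* value.
*/
#if 0
struct object { volatile long refcount; };

static void object_release(struct object * obj)
{
	if(_InterlockedDecrement(&obj->refcount) == 0)
	{
		/* last reference dropped - safe to free the object here */
	}
}
#endif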
#if defined(_M_AMD64)
__INTRIN_INLINE long long _InterlockedDecrement64(volatile long long * const lpAddend)
{
	return _InterlockedExchangeAdd64(lpAddend, -1) - 1;
}

__INTRIN_INLINE long long _InterlockedIncrement64(volatile long long * const lpAddend)
{
	return _InterlockedExchangeAdd64(lpAddend, 1) + 1;
}
#endif
__INTRIN_INLINE unsigned char _interlockedbittestandreset(volatile long * a, const long b)
{
	unsigned char retval;
	__asm__("lock; btrl %[b], %[a]; setb %b[retval]" : [retval] "=q" (retval), [a] "+m" (*a) : [b] "Ir" (b) : "memory");
	return retval;
}

#if defined(_M_AMD64)
__INTRIN_INLINE unsigned char _interlockedbittestandreset64(volatile long long * a, const long long b)
{
	unsigned char retval;
	__asm__("lock; btrq %[b], %[a]; setb %b[retval]" : [retval] "=r" (retval), [a] "+m" (*a) : [b] "Ir" (b) : "memory");
	return retval;
}
#endif

__INTRIN_INLINE unsigned char _interlockedbittestandset(volatile long * a, const long b)
{
	unsigned char retval;
	__asm__("lock; btsl %[b], %[a]; setc %b[retval]" : [retval] "=q" (retval), [a] "+m" (*a) : [b] "Ir" (b) : "memory");
	return retval;
}

#if defined(_M_AMD64)
__INTRIN_INLINE unsigned char _interlockedbittestandset64(volatile long long * a, const long long b)
{
	unsigned char retval;
	__asm__("lock; btsq %[b], %[a]; setc %b[retval]" : [retval] "=r" (retval), [a] "+m" (*a) : [b] "Ir" (b) : "memory");
	return retval;
}
#endif
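
/*
	Illustrative usage sketch (not part of the original header): a minimal
	test-and-set spinlock built on _interlockedbittestandset; the names
	spin_acquire/spin_release are hypothetical.
*/
#if 0
static void spin_acquire(volatile long * lock)
{
	while(_interlockedbittestandset(lock, 0))
		_mm_pause(); /* spin until bit 0 was previously clear */
}

static void spin_release(volatile long * lock)
{
	_interlockedbittestandreset(lock, 0);
}
#endif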
/*** String operations ***/
/* NOTE: we don't set a memory clobber in the __stosX functions because Visual C++ doesn't */
__INTRIN_INLINE void __stosb(unsigned char * Dest, const unsigned char Data, size_t Count)
{
	__asm__ __volatile__
	(
		"rep; stosb" :
		[Dest] "=D" (Dest), [Count] "=c" (Count) :
		"[Dest]" (Dest), "a" (Data), "[Count]" (Count)
	);
}

__INTRIN_INLINE void __stosw(unsigned short * Dest, const unsigned short Data, size_t Count)
{
	__asm__ __volatile__
	(
		"rep; stosw" :
		[Dest] "=D" (Dest), [Count] "=c" (Count) :
		"[Dest]" (Dest), "a" (Data), "[Count]" (Count)
	);
}

__INTRIN_INLINE void __stosd(unsigned long * Dest, const unsigned long Data, size_t Count)
{
	__asm__ __volatile__
	(
		"rep; stosl" :
		[Dest] "=D" (Dest), [Count] "=c" (Count) :
		"[Dest]" (Dest), "a" (Data), "[Count]" (Count)
	);
}

#ifdef _M_AMD64
__INTRIN_INLINE void __stosq(unsigned __int64 * Dest, const unsigned __int64 Data, size_t Count)
{
	__asm__ __volatile__
	(
		"rep; stosq" :
		[Dest] "=D" (Dest), [Count] "=c" (Count) :
		"[Dest]" (Dest), "a" (Data), "[Count]" (Count)
	);
}
#endif
__INTRIN_INLINE void __movsb(unsigned char * Destination, const unsigned char * Source, size_t Count)
{
	__asm__ __volatile__
	(
		"rep; movsb" :
		[Destination] "=D" (Destination), [Source] "=S" (Source), [Count] "=c" (Count) :
		"[Destination]" (Destination), "[Source]" (Source), "[Count]" (Count)
	);
}

__INTRIN_INLINE void __movsw(unsigned short * Destination, const unsigned short * Source, size_t Count)
{
	__asm__ __volatile__
	(
		"rep; movsw" :
		[Destination] "=D" (Destination), [Source] "=S" (Source), [Count] "=c" (Count) :
		"[Destination]" (Destination), "[Source]" (Source), "[Count]" (Count)
	);
}

__INTRIN_INLINE void __movsd(unsigned long * Destination, const unsigned long * Source, size_t Count)
{
	__asm__ __volatile__
	(
		"rep; movsl" :
		[Destination] "=D" (Destination), [Source] "=S" (Source), [Count] "=c" (Count) :
		"[Destination]" (Destination), "[Source]" (Source), "[Count]" (Count)
	);
}

#ifdef _M_AMD64
__INTRIN_INLINE void __movsq(unsigned long * Destination, const unsigned long * Source, size_t Count)
{
	__asm__ __volatile__
	(
		"rep; movsq" :
		[Destination] "=D" (Destination), [Source] "=S" (Source), [Count] "=c" (Count) :
		"[Destination]" (Destination), "[Source]" (Source), "[Count]" (Count)
	);
}
#endif
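
/*
	Illustrative usage sketch (not part of the original header): __stosb and
	__movsb behave like byte-wise memset and memcpy for non-overlapping
	buffers.
*/
#if 0
static void example_string_ops(void)
{
	unsigned char src[16], dst[16];

	__stosb(src, 0xAA, sizeof(src)); /* fill src with 0xAA */
	__movsb(dst, src, sizeof(src));  /* copy src into dst */
}
#endif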
#if defined(_M_AMD64)
/*** GS segment addressing ***/

__INTRIN_INLINE void __writegsbyte(const unsigned long Offset, const unsigned char Data)
{
	__asm__ __volatile__("movb %b[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "ir" (Data) : "memory");
}

__INTRIN_INLINE void __writegsword(const unsigned long Offset, const unsigned short Data)
{
	__asm__ __volatile__("movw %w[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "ir" (Data) : "memory");
}

__INTRIN_INLINE void __writegsdword(const unsigned long Offset, const unsigned long Data)
{
	__asm__ __volatile__("movl %k[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "ir" (Data) : "memory");
}

__INTRIN_INLINE void __writegsqword(const unsigned long Offset, const unsigned __int64 Data)
{
	__asm__ __volatile__("movq %q[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "ir" (Data) : "memory");
}

__INTRIN_INLINE unsigned char __readgsbyte(const unsigned long Offset)
{
	unsigned char value;
	__asm__ __volatile__("movb %%gs:%a[Offset], %b[value]" : [value] "=r" (value) : [Offset] "ir" (Offset));
	return value;
}

__INTRIN_INLINE unsigned short __readgsword(const unsigned long Offset)
{
	unsigned short value;
	__asm__ __volatile__("movw %%gs:%a[Offset], %w[value]" : [value] "=r" (value) : [Offset] "ir" (Offset));
	return value;
}

__INTRIN_INLINE unsigned long __readgsdword(const unsigned long Offset)
{
	unsigned long value;
	__asm__ __volatile__("movl %%gs:%a[Offset], %k[value]" : [value] "=r" (value) : [Offset] "ir" (Offset));
	return value;
}

__INTRIN_INLINE unsigned __int64 __readgsqword(const unsigned long Offset)
{
	unsigned __int64 value;
	__asm__ __volatile__("movq %%gs:%a[Offset], %q[value]" : [value] "=r" (value) : [Offset] "ir" (Offset));
	return value;
}

__INTRIN_INLINE void __incgsbyte(const unsigned long Offset)
{
	__asm__ __volatile__("incb %%gs:%a[Offset]" : : [Offset] "ir" (Offset) : "memory");
}

__INTRIN_INLINE void __incgsword(const unsigned long Offset)
{
	__asm__ __volatile__("incw %%gs:%a[Offset]" : : [Offset] "ir" (Offset) : "memory");
}

__INTRIN_INLINE void __incgsdword(const unsigned long Offset)
{
	__asm__ __volatile__("incl %%gs:%a[Offset]" : : [Offset] "ir" (Offset) : "memory");
}

__INTRIN_INLINE void __addgsbyte(const unsigned long Offset, const unsigned char Data)
{
	__asm__ __volatile__("addb %b[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "ir" (Data) : "memory");
}

__INTRIN_INLINE void __addgsword(const unsigned long Offset, const unsigned short Data)
{
	__asm__ __volatile__("addw %w[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "ir" (Data) : "memory");
}

__INTRIN_INLINE void __addgsdword(const unsigned long Offset, const unsigned int Data)
{
	__asm__ __volatile__("addl %k[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "ir" (Data) : "memory");
}

__INTRIN_INLINE void __addgsqword(const unsigned long Offset, const unsigned __int64 Data)
{
	__asm__ __volatile__("addq %q[Data], %%gs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "ir" (Data) : "memory");
}
#endif
/*** FS segment addressing ***/
__INTRIN_INLINE void __writefsbyte(const unsigned long Offset, const unsigned char Data)
{
	__asm__ __volatile__("movb %b[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}

__INTRIN_INLINE void __writefsword(const unsigned long Offset, const unsigned short Data)
{
	__asm__ __volatile__("movw %w[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "ir" (Data) : "memory");
}

__INTRIN_INLINE void __writefsdword(const unsigned long Offset, const unsigned long Data)
{
	__asm__ __volatile__("movl %k[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "ir" (Data) : "memory");
}

__INTRIN_INLINE unsigned char __readfsbyte(const unsigned long Offset)
{
	unsigned char value;
	__asm__ __volatile__("movb %%fs:%a[Offset], %b[value]" : [value] "=q" (value) : [Offset] "ir" (Offset));
	return value;
}

__INTRIN_INLINE unsigned short __readfsword(const unsigned long Offset)
{
	unsigned short value;
	__asm__ __volatile__("movw %%fs:%a[Offset], %w[value]" : [value] "=r" (value) : [Offset] "ir" (Offset));
	return value;
}

__INTRIN_INLINE unsigned long __readfsdword(const unsigned long Offset)
{
	unsigned long value;
	__asm__ __volatile__("movl %%fs:%a[Offset], %k[value]" : [value] "=r" (value) : [Offset] "ir" (Offset));
	return value;
}

__INTRIN_INLINE void __incfsbyte(const unsigned long Offset)
{
	__asm__ __volatile__("incb %%fs:%a[Offset]" : : [Offset] "ir" (Offset) : "memory");
}

__INTRIN_INLINE void __incfsword(const unsigned long Offset)
{
	__asm__ __volatile__("incw %%fs:%a[Offset]" : : [Offset] "ir" (Offset) : "memory");
}

__INTRIN_INLINE void __incfsdword(const unsigned long Offset)
{
	__asm__ __volatile__("incl %%fs:%a[Offset]" : : [Offset] "ir" (Offset) : "memory");
}

/* NOTE: the bizarre implementation of __addfsxxx mimics the broken Visual C++ behavior */
__INTRIN_INLINE void __addfsbyte(const unsigned long Offset, const unsigned char Data)
{
	if(!__builtin_constant_p(Offset))
		__asm__ __volatile__("addb %b[Offset], %%fs:%a[Offset]" : : [Offset] "r" (Offset) : "memory");
	else
		__asm__ __volatile__("addb %b[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}

__INTRIN_INLINE void __addfsword(const unsigned long Offset, const unsigned short Data)
{
	if(!__builtin_constant_p(Offset))
		__asm__ __volatile__("addw %w[Offset], %%fs:%a[Offset]" : : [Offset] "r" (Offset) : "memory");
	else
		__asm__ __volatile__("addw %w[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}

__INTRIN_INLINE void __addfsdword(const unsigned long Offset, const unsigned int Data)
{
	if(!__builtin_constant_p(Offset))
		__asm__ __volatile__("addl %k[Offset], %%fs:%a[Offset]" : : [Offset] "r" (Offset) : "memory");
	else
		__asm__ __volatile__("addl %k[Data], %%fs:%a[Offset]" : : [Offset] "ir" (Offset), [Data] "iq" (Data) : "memory");
}
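
/*
	Illustrative usage sketch (not part of the original header): on 32-bit
	Windows, fs:[0x18] holds the TEB self-pointer, so __readfsdword can
	retrieve the current thread's TEB address.
*/
#if 0
static unsigned long example_teb_self(void)
{
	return __readfsdword(0x18); /* NtCurrentTeb()->NtTib.Self */
}
#endif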
/*** Bit manipulation ***/
__INTRIN_INLINE unsigned char _BitScanForward(unsigned long * const Index, const unsigned long Mask)
{
	__asm__("bsfl %[Mask], %[Index]" : [Index] "=r" (*Index) : [Mask] "mr" (Mask));
	return Mask ? 1 : 0;
}

__INTRIN_INLINE unsigned char _BitScanReverse(unsigned long * const Index, const unsigned long Mask)
{
	__asm__("bsrl %[Mask], %[Index]" : [Index] "=r" (*Index) : [Mask] "mr" (Mask));
	return Mask ? 1 : 0;
}
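
/*
	Illustrative usage sketch (not part of the original header): integer log2
	via _BitScanReverse; the helper returns 0 for an input of 0 because it
	checks the intrinsic's return value first.
*/
#if 0
static unsigned long example_log2(unsigned long x)
{
	unsigned long index;
	return _BitScanReverse(&index, x) ? index : 0;
}
#endif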
/* NOTE: again, the bizarre implementation follows Visual C++ */
__INTRIN_INLINE unsigned char _bittest(const long * const a, const long b)
{
	unsigned char retval;

	if(__builtin_constant_p(b))
		__asm__("bt %[b], %[a]; setb %b[retval]" : [retval] "=q" (retval) : [a] "mr" (*(a + (b / 32))), [b] "Ir" (b % 32));
	else
		__asm__("bt %[b], %[a]; setb %b[retval]" : [retval] "=q" (retval) : [a] "mr" (*a), [b] "r" (b));

	return retval;
}

#ifdef _M_AMD64
__INTRIN_INLINE unsigned char _bittest64(const __int64 * const a, const __int64 b)
{
	unsigned char retval;

	if(__builtin_constant_p(b))
		__asm__("bt %[b], %[a]; setb %b[retval]" : [retval] "=q" (retval) : [a] "mr" (*(a + (b / 64))), [b] "Ir" (b % 64));
	else
		__asm__("bt %[b], %[a]; setb %b[retval]" : [retval] "=q" (retval) : [a] "mr" (*a), [b] "r" (b));

	return retval;
}
#endif

__INTRIN_INLINE unsigned char _bittestandcomplement(long * const a, const long b)
{
	unsigned char retval;

	if(__builtin_constant_p(b))
		__asm__("btc %[b], %[a]; setb %b[retval]" : [a] "+mr" (*(a + (b / 32))), [retval] "=q" (retval) : [b] "Ir" (b % 32));
	else
		__asm__("btc %[b], %[a]; setb %b[retval]" : [a] "+mr" (*a), [retval] "=q" (retval) : [b] "r" (b));

	return retval;
}

__INTRIN_INLINE unsigned char _bittestandreset(long * const a, const long b)
{
	unsigned char retval;

	if(__builtin_constant_p(b))
		__asm__("btr %[b], %[a]; setb %b[retval]" : [a] "+mr" (*(a + (b / 32))), [retval] "=q" (retval) : [b] "Ir" (b % 32));
	else
		__asm__("btr %[b], %[a]; setb %b[retval]" : [a] "+mr" (*a), [retval] "=q" (retval) : [b] "r" (b));

	return retval;
}

__INTRIN_INLINE unsigned char _bittestandset(long * const a, const long b)
{
	unsigned char retval;

	if(__builtin_constant_p(b))
		__asm__("bts %[b], %[a]; setb %b[retval]" : [a] "+mr" (*(a + (b / 32))), [retval] "=q" (retval) : [b] "Ir" (b % 32));
	else
		__asm__("bts %[b], %[a]; setb %b[retval]" : [a] "+mr" (*a), [retval] "=q" (retval) : [b] "r" (b));

	return retval;
}
__INTRIN_INLINE unsigned char _rotl8(unsigned char value, unsigned char shift)
{
	unsigned char retval;
	__asm__("rolb %b[shift], %b[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
	return retval;
}

__INTRIN_INLINE unsigned short _rotl16(unsigned short value, unsigned char shift)
{
	unsigned short retval;
	__asm__("rolw %b[shift], %w[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
	return retval;
}

__INTRIN_INLINE unsigned int _rotl(unsigned int value, int shift)
{
	unsigned long retval;
	__asm__("roll %b[shift], %k[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
	return retval;
}

__INTRIN_INLINE unsigned int _rotr(unsigned int value, int shift)
{
	unsigned long retval;
	__asm__("rorl %b[shift], %k[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
	return retval;
}

__INTRIN_INLINE unsigned char _rotr8(unsigned char value, unsigned char shift)
{
	unsigned char retval;
	__asm__("rorb %b[shift], %b[retval]" : [retval] "=qm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
	return retval;
}

__INTRIN_INLINE unsigned short _rotr16(unsigned short value, unsigned char shift)
{
	unsigned short retval;
	__asm__("rorw %b[shift], %w[retval]" : [retval] "=rm" (retval) : "[retval]" (value), [shift] "Nc" (shift));
	return retval;
}
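
/*
	Illustrative usage sketch (not part of the original header): rotations
	preserve all bits, e.g. _rotl(0x12345678, 8) yields 0x34567812, and a
	matching rotate right undoes it.
*/
#if 0
static int example_rotation_roundtrip(unsigned int x, int shift)
{
	return _rotr(_rotl(x, shift), shift) == x; /* always non-zero */
}
#endif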
/*
	NOTE: in __ll_lshift, __ll_rshift and __ull_rshift we use the "A"
	constraint (edx:eax) for the Mask argument, because it's the only way GCC
	can pass 64-bit operands around - passing the two 32 bit parts separately
	just confuses it. Also we declare Bit as an int and then truncate it to
	match Visual C++ behavior
*/
__INTRIN_INLINE unsigned long long __ll_lshift(const unsigned long long Mask, const int Bit)
{
	unsigned long long retval = Mask;

	__asm__
	(
		"shldl %b[Bit], %%eax, %%edx; sall %b[Bit], %%eax" :
		"+A" (retval) :
		[Bit] "Nc" ((unsigned char)((unsigned long)Bit) & 0xFF)
	);

	return retval;
}

__INTRIN_INLINE long long __ll_rshift(const long long Mask, const int Bit)
{
	unsigned long long retval = Mask;

	__asm__
	(
		"shrdl %b[Bit], %%edx, %%eax; sarl %b[Bit], %%edx" :
		"+A" (retval) :
		[Bit] "Nc" ((unsigned char)((unsigned long)Bit) & 0xFF)
	);

	return retval;
}

__INTRIN_INLINE unsigned long long __ull_rshift(const unsigned long long Mask, int Bit)
{
	unsigned long long retval = Mask;

	__asm__
	(
		"shrdl %b[Bit], %%edx, %%eax; shrl %b[Bit], %%edx" :
		"+A" (retval) :
		[Bit] "Nc" ((unsigned char)((unsigned long)Bit) & 0xFF)
	);

	return retval;
}
__INTRIN_INLINE unsigned short _byteswap_ushort(unsigned short value)
{
	unsigned short retval;
	__asm__("rorw $8, %w[retval]" : [retval] "=rm" (retval) : "[retval]" (value));
	return retval;
}

__INTRIN_INLINE unsigned long _byteswap_ulong(unsigned long value)
{
	unsigned long retval;
	__asm__("bswapl %[retval]" : [retval] "=r" (retval) : "[retval]" (value));
	return retval;
}

#ifdef _M_AMD64
__INTRIN_INLINE unsigned __int64 _byteswap_uint64(unsigned __int64 value)
{
	unsigned __int64 retval;
	__asm__("bswapq %[retval]" : [retval] "=r" (retval) : "[retval]" (value));
	return retval;
}
#else
__INTRIN_INLINE unsigned __int64 _byteswap_uint64(unsigned __int64 value)
{
	union
	{
		unsigned __int64 int64part;
		struct
		{
			unsigned long lowpart;
			unsigned long hipart;
		};
	} retval;

	retval.int64part = value;
	__asm__("bswapl %[lowpart]\n"
	        "bswapl %[hipart]\n"
	        : [lowpart] "=r" (retval.hipart), [hipart] "=r" (retval.lowpart) : "[lowpart]" (retval.lowpart), "[hipart]" (retval.hipart) );
	return retval.int64part;
}
#endif
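
/*
	Illustrative usage sketch (not part of the original header): byte swaps
	convert between big-endian wire format and the little-endian host, e.g. a
	network-order port number.
*/
#if 0
static unsigned short example_ntohs(unsigned short netshort)
{
	return _byteswap_ushort(netshort); /* 0x1F90 <-> 0x901F */
}
#endif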
/*** 64-bit math ***/
__INTRIN_INLINE long long __emul(const int a, const int b)
{
	long long retval;
	__asm__("imull %[b]" : "=A" (retval) : [a] "a" (a), [b] "rm" (b));
	return retval;
}

__INTRIN_INLINE unsigned long long __emulu(const unsigned int a, const unsigned int b)
{
	unsigned long long retval;
	__asm__("mull %[b]" : "=A" (retval) : [a] "a" (a), [b] "rm" (b));
	return retval;
}

#ifdef _M_AMD64
__INTRIN_INLINE __int64 __mulh(__int64 a, __int64 b)
{
	__int64 retval;
	__asm__("imulq %[b]" : "=d" (retval) : [a] "a" (a), [b] "rm" (b));
	return retval;
}

__INTRIN_INLINE unsigned __int64 __umulh(unsigned __int64 a, unsigned __int64 b)
{
	unsigned __int64 retval;
	__asm__("mulq %[b]" : "=d" (retval) : [a] "a" (a), [b] "rm" (b));
	return retval;
}
#endif
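
/*
	Illustrative usage sketch (not part of the original header): __emul widens
	both factors before multiplying, so the 32-bit product cannot overflow;
	handy for fixed-point arithmetic.
*/
#if 0
static long long example_scaled(int numerator, int scale_q16)
{
	return __emul(numerator, scale_q16) >> 16; /* Q16 fixed-point multiply */
}
#endif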
/*** Port I/O ***/
__INTRIN_INLINE unsigned char __inbyte(const unsigned short Port)
{
	unsigned char byte;
	__asm__ __volatile__("inb %w[Port], %b[byte]" : [byte] "=a" (byte) : [Port] "Nd" (Port));
	return byte;
}

__INTRIN_INLINE unsigned short __inword(const unsigned short Port)
{
	unsigned short word;
	__asm__ __volatile__("inw %w[Port], %w[word]" : [word] "=a" (word) : [Port] "Nd" (Port));
	return word;
}

__INTRIN_INLINE unsigned long __indword(const unsigned short Port)
{
	unsigned long dword;
	__asm__ __volatile__("inl %w[Port], %k[dword]" : [dword] "=a" (dword) : [Port] "Nd" (Port));
	return dword;
}
__INTRIN_INLINE void __inbytestring(unsigned short Port, unsigned char * Buffer, unsigned long Count)
{
	__asm__ __volatile__
	(
		"rep; insb" :
		[Buffer] "=D" (Buffer), [Count] "=c" (Count) :
		"d" (Port), "[Buffer]" (Buffer), "[Count]" (Count) :
		"memory"
	);
}

__INTRIN_INLINE void __inwordstring(unsigned short Port, unsigned short * Buffer, unsigned long Count)
{
	__asm__ __volatile__
	(
		"rep; insw" :
		[Buffer] "=D" (Buffer), [Count] "=c" (Count) :
		"d" (Port), "[Buffer]" (Buffer), "[Count]" (Count) :
		"memory"
	);
}

__INTRIN_INLINE void __indwordstring(unsigned short Port, unsigned long * Buffer, unsigned long Count)
{
	__asm__ __volatile__
	(
		"rep; insl" :
		[Buffer] "=D" (Buffer), [Count] "=c" (Count) :
		"d" (Port), "[Buffer]" (Buffer), "[Count]" (Count) :
		"memory"
	);
}
__INTRIN_INLINE void __outbyte(unsigned short const Port, const unsigned char Data)
{
	__asm__ __volatile__("outb %b[Data], %w[Port]" : : [Port] "Nd" (Port), [Data] "a" (Data));
}

__INTRIN_INLINE void __outword(unsigned short const Port, const unsigned short Data)
{
	__asm__ __volatile__("outw %w[Data], %w[Port]" : : [Port] "Nd" (Port), [Data] "a" (Data));
}

__INTRIN_INLINE void __outdword(unsigned short const Port, const unsigned long Data)
{
	__asm__ __volatile__("outl %k[Data], %w[Port]" : : [Port] "Nd" (Port), [Data] "a" (Data));
}

__INTRIN_INLINE void __outbytestring(unsigned short const Port, const unsigned char * const Buffer, const unsigned long Count)
{
	__asm__ __volatile__("rep; outsb" : : [Port] "d" (Port), [Buffer] "S" (Buffer), "c" (Count));
}

__INTRIN_INLINE void __outwordstring(unsigned short const Port, const unsigned short * const Buffer, const unsigned long Count)
{
	__asm__ __volatile__("rep; outsw" : : [Port] "d" (Port), [Buffer] "S" (Buffer), "c" (Count));
}

__INTRIN_INLINE void __outdwordstring(unsigned short const Port, const unsigned long * const Buffer, const unsigned long Count)
{
	__asm__ __volatile__("rep; outsl" : : [Port] "d" (Port), [Buffer] "S" (Buffer), "c" (Count));
}
__INTRIN_INLINE int _inp(unsigned short Port)
{
	return __inbyte(Port);
}

__INTRIN_INLINE unsigned short _inpw(unsigned short Port)
{
	return __inword(Port);
}

__INTRIN_INLINE unsigned long _inpd(unsigned short Port)
{
	return __indword(Port);
}

__INTRIN_INLINE int _outp(unsigned short Port, int databyte)
{
	__outbyte(Port, databyte);
	return databyte;
}

__INTRIN_INLINE unsigned short _outpw(unsigned short Port, unsigned short dataword)
{
	__outword(Port, dataword);
	return dataword;
}

__INTRIN_INLINE unsigned long _outpd(unsigned short Port, unsigned long dataword)
{
	__outdword(Port, dataword);
	return dataword;
}
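
/*
	Illustrative usage sketch (not part of the original header): reading the
	seconds register of the CMOS RTC through index port 0x70 and data port
	0x71 (requires ring 0).
*/
#if 0
static unsigned char example_cmos_seconds(void)
{
	__outbyte(0x70, 0x00); /* select CMOS register 0 (seconds) */
	return __inbyte(0x71);
}
#endif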
/*** System information ***/
__INTRIN_INLINE void __cpuid(int CPUInfo[], const int InfoType)
{
	__asm__ __volatile__("cpuid" : "=a" (CPUInfo[0]), "=b" (CPUInfo[1]), "=c" (CPUInfo[2]), "=d" (CPUInfo[3]) : "a" (InfoType));
}
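
/*
	Illustrative usage sketch (not part of the original header): CPUID leaf 0
	returns the vendor string in EBX, EDX, ECX - in that order.
*/
#if 0
static void example_cpu_vendor(char vendor[13])
{
	int info[4];
	int order[3];
	int i;

	__cpuid(info, 0);
	order[0] = info[1]; /* EBX */
	order[1] = info[3]; /* EDX */
	order[2] = info[2]; /* ECX */

	for(i = 0; i < 12; ++i)
		vendor[i] = ((const char *)order)[i];

	vendor[12] = '\0'; /* e.g. "GenuineIntel" */
}
#endif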
__INTRIN_INLINE unsigned long long __rdtsc(void)
{
#ifdef _M_AMD64
	unsigned long long low, high;
	__asm__ __volatile__("rdtsc" : "=a"(low), "=d"(high));
	return low | (high << 32);
#else
	unsigned long long retval;
	__asm__ __volatile__("rdtsc" : "=A"(retval));
	return retval;
#endif
}
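
/*
	Illustrative usage sketch (not part of the original header): timing a code
	region in raw TSC ticks. Note that rdtsc is not a serializing instruction,
	so very short regions may be measured out of order.
*/
#if 0
static unsigned long long example_measure_ticks(void (*fn)(void))
{
	unsigned long long start = __rdtsc();
	fn();
	return __rdtsc() - start;
}
#endif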
__INTRIN_INLINE void __writeeflags(uintptr_t Value)
{
	__asm__ __volatile__("push %0\n popf" : : "rim"(Value));
}

__INTRIN_INLINE uintptr_t __readeflags(void)
{
	uintptr_t retval;
	__asm__ __volatile__("pushf\n pop %0" : "=rm"(retval));
	return retval;
}
/*** Interrupts ***/
__INTRIN_INLINE void __debugbreak(void)
{
	__asm__("int $3");
}

__INTRIN_INLINE void __int2c(void)
{
	__asm__("int $0x2c");
}

__INTRIN_INLINE void _disable(void)
{
	__asm__("cli" : : : "memory");
}

__INTRIN_INLINE void _enable(void)
{
	__asm__("sti" : : : "memory");
}

__INTRIN_INLINE void __halt(void)
{
	__asm__("hlt\n\t" : : : "memory");
}
/*** Protected memory management ***/

__INTRIN_INLINE void __writecr0(const unsigned __int64 Data)
{
	__asm__("mov %[Data], %%cr0" : : [Data] "r" (Data) : "memory");
}

__INTRIN_INLINE void __writecr3(const unsigned __int64 Data)
{
	__asm__("mov %[Data], %%cr3" : : [Data] "r" (Data) : "memory");
}

__INTRIN_INLINE void __writecr4(const unsigned __int64 Data)
{
	__asm__("mov %[Data], %%cr4" : : [Data] "r" (Data) : "memory");
}

#ifdef _M_AMD64
__INTRIN_INLINE void __writecr8(const unsigned __int64 Data)
{
	__asm__("mov %[Data], %%cr8" : : [Data] "r" (Data) : "memory");
}
__INTRIN_INLINE unsigned __int64 __readcr0(void)
{
	unsigned __int64 value;
	__asm__ __volatile__("mov %%cr0, %[value]" : [value] "=r" (value));
	return value;
}

__INTRIN_INLINE unsigned __int64 __readcr2(void)
{
	unsigned __int64 value;
	__asm__ __volatile__("mov %%cr2, %[value]" : [value] "=r" (value));
	return value;
}

__INTRIN_INLINE unsigned __int64 __readcr3(void)
{
	unsigned __int64 value;
	__asm__ __volatile__("mov %%cr3, %[value]" : [value] "=r" (value));
	return value;
}

__INTRIN_INLINE unsigned __int64 __readcr4(void)
{
	unsigned __int64 value;
	__asm__ __volatile__("mov %%cr4, %[value]" : [value] "=r" (value));
	return value;
}

__INTRIN_INLINE unsigned __int64 __readcr8(void)
{
	unsigned __int64 value;
	__asm__ __volatile__("movq %%cr8, %q[value]" : [value] "=r" (value));
	return value;
}
#else /* _M_AMD64 */
__INTRIN_INLINE unsigned long __readcr0(void)
{
	unsigned long value;
	__asm__ __volatile__("mov %%cr0, %[value]" : [value] "=r" (value));
	return value;
}

__INTRIN_INLINE unsigned long __readcr2(void)
{
	unsigned long value;
	__asm__ __volatile__("mov %%cr2, %[value]" : [value] "=r" (value));
	return value;
}

__INTRIN_INLINE unsigned long __readcr3(void)
{
	unsigned long value;
	__asm__ __volatile__("mov %%cr3, %[value]" : [value] "=r" (value));
	return value;
}

__INTRIN_INLINE unsigned long __readcr4(void)
{
	unsigned long value;
	__asm__ __volatile__("mov %%cr4, %[value]" : [value] "=r" (value));
	return value;
}
#endif /* _M_AMD64 */
#ifdef _M_AMD64
__INTRIN_INLINE unsigned __int64 __readdr(unsigned int reg)
{
	unsigned __int64 value;

	switch(reg)
	{
		case 0:
			__asm__ __volatile__("movq %%dr0, %q[value]" : [value] "=r" (value));
			break;
		case 1:
			__asm__ __volatile__("movq %%dr1, %q[value]" : [value] "=r" (value));
			break;
		case 2:
			__asm__ __volatile__("movq %%dr2, %q[value]" : [value] "=r" (value));
			break;
		case 3:
			__asm__ __volatile__("movq %%dr3, %q[value]" : [value] "=r" (value));
			break;
		case 4:
			__asm__ __volatile__("movq %%dr4, %q[value]" : [value] "=r" (value));
			break;
		case 5:
			__asm__ __volatile__("movq %%dr5, %q[value]" : [value] "=r" (value));
			break;
		case 6:
			__asm__ __volatile__("movq %%dr6, %q[value]" : [value] "=r" (value));
			break;
		case 7:
			__asm__ __volatile__("movq %%dr7, %q[value]" : [value] "=r" (value));
			break;
	}

	return value;
}

__INTRIN_INLINE void __writedr(unsigned reg, unsigned __int64 value)
{
	switch(reg)
	{
		case 0:
			__asm__("movq %q[value], %%dr0" : : [value] "r" (value) : "memory");
			break;
		case 1:
			__asm__("movq %q[value], %%dr1" : : [value] "r" (value) : "memory");
			break;
		case 2:
			__asm__("movq %q[value], %%dr2" : : [value] "r" (value) : "memory");
			break;
		case 3:
			__asm__("movq %q[value], %%dr3" : : [value] "r" (value) : "memory");
			break;
		case 4:
			__asm__("movq %q[value], %%dr4" : : [value] "r" (value) : "memory");
			break;
		case 5:
			__asm__("movq %q[value], %%dr5" : : [value] "r" (value) : "memory");
			break;
		case 6:
			__asm__("movq %q[value], %%dr6" : : [value] "r" (value) : "memory");
			break;
		case 7:
			__asm__("movq %q[value], %%dr7" : : [value] "r" (value) : "memory");
			break;
	}
}
#else /* _M_AMD64 */
__INTRIN_INLINE unsigned int __readdr(unsigned int reg)
{
	unsigned int value;

	switch(reg)
	{
		case 0:
			__asm__ __volatile__("mov %%dr0, %[value]" : [value] "=r" (value));
			break;
		case 1:
			__asm__ __volatile__("mov %%dr1, %[value]" : [value] "=r" (value));
			break;
		case 2:
			__asm__ __volatile__("mov %%dr2, %[value]" : [value] "=r" (value));
			break;
		case 3:
			__asm__ __volatile__("mov %%dr3, %[value]" : [value] "=r" (value));
			break;
		case 4:
			__asm__ __volatile__("mov %%dr4, %[value]" : [value] "=r" (value));
			break;
		case 5:
			__asm__ __volatile__("mov %%dr5, %[value]" : [value] "=r" (value));
			break;
		case 6:
			__asm__ __volatile__("mov %%dr6, %[value]" : [value] "=r" (value));
			break;
		case 7:
			__asm__ __volatile__("mov %%dr7, %[value]" : [value] "=r" (value));
			break;
	}

	return value;
}

__INTRIN_INLINE void __writedr(unsigned reg, unsigned int value)
{
	switch(reg)
	{
		case 0:
			__asm__("mov %[value], %%dr0" : : [value] "r" (value) : "memory");
			break;
		case 1:
			__asm__("mov %[value], %%dr1" : : [value] "r" (value) : "memory");
			break;
		case 2:
			__asm__("mov %[value], %%dr2" : : [value] "r" (value) : "memory");
			break;
		case 3:
			__asm__("mov %[value], %%dr3" : : [value] "r" (value) : "memory");
			break;
		case 4:
			__asm__("mov %[value], %%dr4" : : [value] "r" (value) : "memory");
			break;
		case 5:
			__asm__("mov %[value], %%dr5" : : [value] "r" (value) : "memory");
			break;
		case 6:
			__asm__("mov %[value], %%dr6" : : [value] "r" (value) : "memory");
			break;
		case 7:
			__asm__("mov %[value], %%dr7" : : [value] "r" (value) : "memory");
			break;
	}
}
#endif /* _M_AMD64 */

__INTRIN_INLINE void __invlpg(void * const Address)
{
	__asm__("invlpg %[Address]" : : [Address] "m" (*((unsigned char *)(Address))) : "memory");
}
/*** System operations ***/
__INTRIN_INLINE unsigned long long __readmsr(const int reg)
{
#ifdef _M_AMD64
	unsigned long low, high;
	__asm__ __volatile__("rdmsr" : "=a" (low), "=d" (high) : "c" (reg));
	return ((unsigned long long)high << 32) | low;
#else
	unsigned long long retval;
	__asm__ __volatile__("rdmsr" : "=A" (retval) : "c" (reg));
	return retval;
#endif
}

__INTRIN_INLINE void __writemsr(const unsigned long Register, const unsigned long long Value)
{
#ifdef _M_AMD64
	__asm__ __volatile__("wrmsr" : : "a" (Value), "d" (Value >> 32), "c" (Register));
#else
	__asm__ __volatile__("wrmsr" : : "A" (Value), "c" (Register));
#endif
}

__INTRIN_INLINE unsigned long long __readpmc(const int counter)
{
	unsigned long long retval;
	__asm__ __volatile__("rdpmc" : "=A" (retval) : "c" (counter));
	return retval;
}

/* NOTE: an immediate value for 'a' will raise an ICE in Visual C++ */
__INTRIN_INLINE unsigned long __segmentlimit(const unsigned long a)
{
	unsigned long retval;
	__asm__ __volatile__("lsl %[a], %[retval]" : [retval] "=r" (retval) : [a] "rm" (a));
	return retval;
}

__INTRIN_INLINE void __wbinvd(void)
{
	__asm__ __volatile__("wbinvd" : : : "memory");
}

__INTRIN_INLINE void __lidt(void *Source)
{
	__asm__ __volatile__("lidt %0" : : "m"(*(short*)Source));
}

__INTRIN_INLINE void __sidt(void *Destination)
{
	__asm__ __volatile__("sidt %0" : : "m"(*(short*)Destination) : "memory");
}
/*** Misc operations ***/

__INTRIN_INLINE void _mm_pause(void)
{
	__asm__ __volatile__("pause" : : : "memory");
}

__INTRIN_INLINE void __nop(void)
{
	__asm__ __volatile__("nop");
}

#ifdef __cplusplus
}
#endif

#endif /* KJK_INTRIN_X86_H_ */