/*** Stack frame juggling ***/
/* Address the current function will return to. */
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)
/* Address of the slot on the stack holding the return address. */
void * _AddressOfReturnAddress(void);
#pragma intrinsic(_AddressOfReturnAddress)
#if defined(_M_IX86) || defined(_M_AMD64)
/* EFLAGS value of the caller (x86/x64 only). */
unsigned int __getcallerseflags(void);
#pragma intrinsic(__getcallerseflags)
#endif
/*** Memory barriers ***/
/* Compiler-only barriers: constrain compiler reordering, emit no instructions. */
void _ReadWriteBarrier(void);
#pragma intrinsic(_ReadWriteBarrier)
void _ReadBarrier(void);
#pragma intrinsic(_ReadBarrier)
void _WriteBarrier(void);
#pragma intrinsic(_WriteBarrier)
#if defined(_M_IX86) || defined(_M_AMD64)
/* Hardware fences (MFENCE / LFENCE / SFENCE). */
void _mm_mfence(void);
#pragma intrinsic(_mm_mfence)
void _mm_lfence(void);
#pragma intrinsic(_mm_lfence)
void _mm_sfence(void);
#pragma intrinsic(_mm_sfence)
#endif
#if defined(_M_AMD64)
/* x64-only store fence. NOTE(review): guard restored — lost in extraction. */
void __faststorefence(void);
#pragma intrinsic(__faststorefence)
#endif
38 /*** Atomic operations ***/
39 long _InterlockedCompareExchange(volatile long * Destination
, long Exchange
, long Comperand
);
40 #pragma intrinsic(_InterlockedCompareExchange)
41 long _InterlockedExchange(volatile long * Target
, long Value
);
42 #pragma intrinsic(_InterlockedExchange)
43 long _InterlockedExchangeAdd(volatile long * Addend
, long Value
);
44 #pragma intrinsic(_InterlockedExchangeAdd)
45 char _InterlockedAnd8(volatile char * value
, char mask
);
46 #pragma intrinsic(_InterlockedAnd8)
47 short _InterlockedAnd16(volatile short * value
, short mask
);
48 #pragma intrinsic(_InterlockedAnd16)
49 long _InterlockedAnd(volatile long * value
, long mask
);
50 #pragma intrinsic(_InterlockedAnd)
51 char _InterlockedOr8(volatile char * value
, char mask
);
52 #pragma intrinsic(_InterlockedOr8)
53 short _InterlockedOr16(volatile short * value
, short mask
);
54 #pragma intrinsic(_InterlockedOr16)
55 long _InterlockedOr(volatile long * value
, long mask
);
56 #pragma intrinsic(_InterlockedOr)
57 char _InterlockedXor8(volatile char * value
, char mask
);
58 #pragma intrinsic(_InterlockedXor8)
59 short _InterlockedXor16(volatile short * value
, short mask
);
60 #pragma intrinsic(_InterlockedXor16)
61 long _InterlockedXor(volatile long * value
, long mask
);
62 #pragma intrinsic(_InterlockedXor)
63 long _InterlockedDecrement(volatile long * lpAddend
);
64 #pragma intrinsic(_InterlockedDecrement)
65 long _InterlockedIncrement(volatile long * lpAddend
);
66 #pragma intrinsic(_InterlockedIncrement)
67 short _InterlockedDecrement16(volatile short * lpAddend
);
68 #pragma intrinsic(_InterlockedDecrement16)
69 short _InterlockedIncrement16(volatile short * lpAddend
);
70 #pragma intrinsic(_InterlockedIncrement16)
71 unsigned char _interlockedbittestandreset(volatile long * a
, long b
);
72 #pragma intrinsic(_interlockedbittestandreset)
73 unsigned char _interlockedbittestandset(volatile long * a
, long b
);
74 #pragma intrinsic(_interlockedbittestandset)
77 long _InterlockedAddLargeStatistic(volatile __int64
* Addend
, long Value
);
78 #pragma intrinsic(_InterlockedAddLargeStatistic)
79 #elif defined(_M_AMD64)
80 __int64
_InterlockedExchange64(volatile __int64
* Target
, __int64 Value
);
81 #pragma intrinsic(_InterlockedExchange64)
82 __int64
_InterlockedExchangeAdd64(volatile __int64
* Addend
, __int64 Value
);
83 #pragma intrinsic(_InterlockedExchangeAdd64)
84 void * _InterlockedCompareExchangePointer(void * volatile * Destination
, void * Exchange
, void * Comperand
);
85 #pragma intrinsic(_InterlockedCompareExchangePointer)
86 void * _InterlockedExchangePointer(void * volatile * Target
, void * Value
);
87 #pragma intrinsic(_InterlockedExchangePointer)
88 __int64
_InterlockedAnd64(volatile __int64
* value
, __int64 mask
);
89 #pragma intrinsic(_InterlockedAnd64)
90 __int64
_InterlockedOr64(volatile __int64
* value
, __int64 mask
);
91 #pragma intrinsic(_InterlockedOr64)
92 __int64
_InterlockedCompareExchange64(volatile __int64
* Destination
, __int64 Exchange
, __int64 Comperand
);
93 #pragma intrinsic(_InterlockedCompareExchange64)
94 __int64
_InterlockedDecrement64(volatile __int64
* lpAddend
);
95 #pragma intrinsic(_InterlockedDecrement64)
96 __int64
_InterlockedIncrement64(volatile __int64
* lpAddend
);
97 #pragma intrinsic(_InterlockedIncrement64)
98 unsigned char _interlockedbittestandreset64(volatile __int64
* a
, __int64 b
);
99 #pragma intrinsic(_interlockedbittestandreset64)
100 unsigned char _interlockedbittestandset64(volatile __int64
* a
, __int64 b
);
101 #pragma intrinsic(_interlockedbittestandset64)
#if defined(_M_IX86) || defined(_M_AMD64)
/*** String operations ***/
/* REP STOS*: fill Count elements at Dest with Data. */
void __stosb(unsigned char * Dest, unsigned char Data, size_t Count);
#pragma intrinsic(__stosb)
void __stosw(unsigned short * Dest, unsigned short Data, size_t Count);
#pragma intrinsic(__stosw)
void __stosd(unsigned long * Dest, unsigned long Data, size_t Count);
#pragma intrinsic(__stosd)
/* REP MOVS*: copy Count elements from Source to Destination. */
void __movsb(unsigned char * Destination, unsigned char const * Source, size_t Count);
#pragma intrinsic(__movsb)
void __movsw(unsigned short * Destination, unsigned short const * Source, size_t Count);
#pragma intrinsic(__movsw)
void __movsd(unsigned long * Destination, unsigned long const * Source, size_t Count);
#pragma intrinsic(__movsd)
#if defined(_M_AMD64)
/* 64-bit element variants (x64 only).
   NOTE(review): guard restored — lost in extraction. */
void __stosq(unsigned __int64 * Dest, unsigned __int64 Data, size_t Count);
#pragma intrinsic(__stosq)
void __movsq(unsigned __int64 * Destination, unsigned __int64 const * Source, size_t Count);
#pragma intrinsic(__movsq)
#endif
#endif
#if defined(_M_AMD64)
/*** GS segment addressing ***/
/* Write Data at the given offset from the GS base. */
void __writegsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__writegsbyte)
void __writegsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__writegsword)
void __writegsdword(unsigned long Offset, unsigned long Data);
#pragma intrinsic(__writegsdword)
void __writegsqword(unsigned long Offset, unsigned __int64 Data);
#pragma intrinsic(__writegsqword)
/* Read the value at the given offset from the GS base. */
unsigned char __readgsbyte(unsigned long Offset);
#pragma intrinsic(__readgsbyte)
unsigned short __readgsword(unsigned long Offset);
#pragma intrinsic(__readgsword)
unsigned long __readgsdword(unsigned long Offset);
#pragma intrinsic(__readgsdword)
unsigned __int64 __readgsqword(unsigned long Offset);
#pragma intrinsic(__readgsqword)
/* Increment the value at the given offset from the GS base. */
void __incgsbyte(unsigned long Offset);
#pragma intrinsic(__incgsbyte)
void __incgsword(unsigned long Offset);
#pragma intrinsic(__incgsword)
void __incgsdword(unsigned long Offset);
#pragma intrinsic(__incgsdword)
/* Add Data to the value at the given offset from the GS base. */
void __addgsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__addgsbyte)
void __addgsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__addgsword)
void __addgsdword(unsigned long Offset, unsigned int Data);
#pragma intrinsic(__addgsdword)
void __addgsqword(unsigned long Offset, unsigned __int64 Data);
#pragma intrinsic(__addgsqword)
/* NOTE(review): closing #endif restored — lost in extraction. */
#endif
#if defined(_M_IX86)
/*** FS segment addressing ***/
/* NOTE(review): FS addressing is the x86 counterpart of the x64 GS group;
   the enclosing #if/#endif pair was lost in extraction and is restored here. */
/* Write Data at the given offset from the FS base. */
void __writefsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__writefsbyte)
void __writefsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__writefsword)
void __writefsdword(unsigned long Offset, unsigned long Data);
#pragma intrinsic(__writefsdword)
/* Read the value at the given offset from the FS base. */
unsigned char __readfsbyte(unsigned long Offset);
#pragma intrinsic(__readfsbyte)
unsigned short __readfsword(unsigned long Offset);
#pragma intrinsic(__readfsword)
unsigned long __readfsdword(unsigned long Offset);
#pragma intrinsic(__readfsdword)
/* Increment the value at the given offset from the FS base. */
void __incfsbyte(unsigned long Offset);
#pragma intrinsic(__incfsbyte)
void __incfsword(unsigned long Offset);
#pragma intrinsic(__incfsword)
void __incfsdword(unsigned long Offset);
#pragma intrinsic(__incfsdword)
/* Add Data to the value at the given offset from the FS base. */
void __addfsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__addfsbyte)
void __addfsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__addfsword)
void __addfsdword(unsigned long Offset, unsigned int Data);
#pragma intrinsic(__addfsdword)
#endif
189 /*** Bit manipulation ***/
190 unsigned char _BitScanForward(unsigned long * Index
, unsigned long Mask
);
191 #pragma intrinsic(_BitScanForward)
192 unsigned char _BitScanReverse(unsigned long * Index
, unsigned long Mask
);
193 #pragma intrinsic(_BitScanReverse)
195 unsigned char _BitScanForward64(unsigned long * Index
, unsigned long long Mask
);
196 #pragma intrinsic(_BitScanForward64)
197 unsigned char _BitScanReverse64(unsigned long * Index
, unsigned long long Mask
);
198 #pragma intrinsic(_BitScanReverse64)
200 unsigned char _bittest(const long * a
, long b
);
201 #pragma intrinsic(_bittest)
202 unsigned char _bittestandcomplement(long * a
, long b
);
203 #pragma intrinsic(_bittestandcomplement)
204 unsigned char _bittestandreset(long * a
, long b
);
205 #pragma intrinsic(_bittestandreset)
206 unsigned char _bittestandset(long * a
, long b
);
207 #pragma intrinsic(_bittestandset)
208 unsigned char _rotl8(unsigned char value
, unsigned char shift
);
209 #pragma intrinsic(_rotl8)
210 unsigned short _rotl16(unsigned short value
, unsigned char shift
);
211 #pragma intrinsic(_rotl16)
212 unsigned int _rotl(unsigned int value
, int shift
);
213 #pragma intrinsic(_rotl)
214 unsigned int _rotr(unsigned int value
, int shift
);
215 #pragma intrinsic(_rotr)
216 unsigned char _rotr8(unsigned char value
, unsigned char shift
);
217 #pragma intrinsic(_rotr8)
218 unsigned short _rotr16(unsigned short value
, unsigned char shift
);
219 #pragma intrinsic(_rotr16)
220 unsigned short _byteswap_ushort(unsigned short value
);
221 #pragma intrinsic(_byteswap_ushort)
222 unsigned long _byteswap_ulong(unsigned long value
);
223 #pragma intrinsic(_byteswap_ulong)
224 unsigned __int64
_byteswap_uint64(unsigned __int64 value
);
225 #pragma intrinsic(_byteswap_uint64)
226 #if defined(_M_IX86) || defined(_M_AMD64)
227 unsigned __int64
__ll_lshift(unsigned __int64 Mask
, int Bit
);
228 #pragma intrinsic(__ll_lshift)
229 __int64
__ll_rshift(__int64 Mask
, int Bit
);
230 #pragma intrinsic(__ll_rshift)
231 unsigned __int64
__ull_rshift(unsigned __int64 Mask
, int Bit
);
232 #pragma intrinsic(__ull_rshift)
235 unsigned char _bittest64(__int64
const *a
, __int64 b
);
236 #pragma intrinsic(_bittest64)
#if defined(_M_IX86) || defined(_M_AMD64)
/*** 64-bit math ***/
/* Full-width 32x32 -> 64 multiplies. */
__int64 __emul(int a, int b);
#pragma intrinsic(__emul)
unsigned __int64 __emulu(unsigned int a, unsigned int b);
#pragma intrinsic(__emulu)
#endif
#if defined(_M_AMD64)
/* High 64 bits of an unsigned 64x64 multiply (x64 only).
   NOTE(review): guard restored — lost in extraction. */
unsigned __int64 __umulh(unsigned __int64 a, unsigned __int64 b);
#pragma intrinsic(__umulh)
#endif
#if defined(_M_IX86) || defined(_M_AMD64)
/* Port I/O (IN/OUT and their string forms). */
unsigned char __inbyte(unsigned short Port);
#pragma intrinsic(__inbyte)
unsigned short __inword(unsigned short Port);
#pragma intrinsic(__inword)
unsigned long __indword(unsigned short Port);
#pragma intrinsic(__indword)
void __inbytestring(unsigned short Port, unsigned char * Buffer, unsigned long Count);
#pragma intrinsic(__inbytestring)
void __inwordstring(unsigned short Port, unsigned short * Buffer, unsigned long Count);
#pragma intrinsic(__inwordstring)
void __indwordstring(unsigned short Port, unsigned long * Buffer, unsigned long Count);
#pragma intrinsic(__indwordstring)
void __outbyte(unsigned short Port, unsigned char Data);
#pragma intrinsic(__outbyte)
void __outword(unsigned short Port, unsigned short Data);
#pragma intrinsic(__outword)
void __outdword(unsigned short Port, unsigned long Data);
#pragma intrinsic(__outdword)
void __outbytestring(unsigned short Port, unsigned char * Buffer, unsigned long Count);
#pragma intrinsic(__outbytestring)
void __outwordstring(unsigned short Port, unsigned short * Buffer, unsigned long Count);
#pragma intrinsic(__outwordstring)
void __outdwordstring(unsigned short Port, unsigned long * Buffer, unsigned long Count);
#pragma intrinsic(__outdwordstring)
/* NOTE(review): closing #endif restored — lost in extraction. */
#endif
#if defined(_M_IX86) || defined(_M_AMD64)
/*** System information ***/
/* CPUID leaf InfoType into CPUInfo[0..3] (EAX, EBX, ECX, EDX). */
void __cpuid(int CPUInfo[], int InfoType);
#pragma intrinsic(__cpuid)
/* Time-stamp counter (RDTSC). */
unsigned __int64 __rdtsc(void);
#pragma intrinsic(__rdtsc)
void __writeeflags(uintptr_t Value);
#pragma intrinsic(__writeeflags)
uintptr_t __readeflags(void);
#pragma intrinsic(__readeflags)
/* NOTE(review): closing #endif restored — lost in extraction. */
#endif
/* Debug break and interrupt control.
   NOTE(review): the declarations for _disable/_enable/__int2c/__halt were lost
   in extraction (their #pragma intrinsic lines survive) and are restored here. */
void __debugbreak(void);
#pragma intrinsic(__debugbreak)
void _disable(void);
#pragma intrinsic(_disable)
void _enable(void);
#pragma intrinsic(_enable)
#if defined(_M_IX86) || defined(_M_AMD64)
void __int2c(void);
#pragma intrinsic(__int2c)
void __halt(void);
#pragma intrinsic(__halt)
#endif
/*** Protected memory management ***/
/* NOTE(review): the original condition read
   `#if defined(_M_IX86) || defined(_M_AMD64)`, which made the
   `#elif defined(_M_IX86)` branch below unreachable and would give x86 the
   x64-width (unsigned __int64) control-register accessors. Narrowed to
   _M_AMD64 so each architecture gets its correctly-sized declarations. */
#if defined(_M_AMD64)
void __writecr0(unsigned __int64 Data);
#pragma intrinsic(__writecr0)
void __writecr3(unsigned __int64 Data);
#pragma intrinsic(__writecr3)
void __writecr4(unsigned __int64 Data);
#pragma intrinsic(__writecr4)
void __writecr8(unsigned __int64 Data);
#pragma intrinsic(__writecr8)
unsigned __int64 __readcr0(void);
#pragma intrinsic(__readcr0)
unsigned __int64 __readcr2(void);
#pragma intrinsic(__readcr2)
unsigned __int64 __readcr3(void);
#pragma intrinsic(__readcr3)
unsigned __int64 __readcr4(void);
#pragma intrinsic(__readcr4)
unsigned __int64 __readcr8(void);
#pragma intrinsic(__readcr8)
/* Debug registers. */
unsigned __int64 __readdr(unsigned int reg);
#pragma intrinsic(__readdr)
void __writedr(unsigned reg, unsigned __int64 value);
#pragma intrinsic(__writedr)
#elif defined(_M_IX86)
unsigned long __readcr0(void);
unsigned long __readcr2(void);
unsigned long __readcr3(void);
//unsigned long __readcr4(void);
//#pragma intrinsic(__readcr4)
// HACK: MSVC is broken
unsigned long ___readcr4(void);
#define __readcr4 ___readcr4
/* Debug registers. */
unsigned int __readdr(unsigned int reg);
void __writedr(unsigned reg, unsigned int value);
#endif
/* NOTE(review): the body of __invlpg_fixed (original lines 351-360) appears to
   have been lost in extraction — the declaration below has no body or
   semicolon. Restore it from the upstream header before building; do not
   guess at the missing inline-asm replacement here. */
346 // This intrinsic is broken and generates wrong opcodes,
347 // when optimization is enabled!
348 #pragma warning(push)
349 #pragma warning(disable:4711)
350 void __forceinline
__invlpg_fixed(void * Address
)
/* Route callers of __invlpg through the workaround above. */
361 #define __invlpg __invlpg_fixed
362 #elif defined(_M_AMD64)
/* On x64 the compiler-provided intrinsic is usable directly. */
363 void __invlpg(void * Address
);
364 #pragma intrinsic(__invlpg)
/*** System operations ***/
#if defined(_M_IX86) || defined(_M_AMD64)
/* Model-specific registers. */
unsigned __int64 __readmsr(int reg);
#pragma intrinsic(__readmsr)
void __writemsr(unsigned long Register, unsigned __int64 Value);
#pragma intrinsic(__writemsr)
/* Performance-monitoring counter (RDPMC). */
unsigned __int64 __readpmc(int counter);
#pragma intrinsic(__readpmc)
unsigned long __segmentlimit(unsigned long a);
#pragma intrinsic(__segmentlimit)
/* NOTE(review): declaration restored — lost in extraction (its pragma survives). */
void __wbinvd(void);
#pragma intrinsic(__wbinvd)
/* Load / store the interrupt descriptor table register. */
void __lidt(void *Source);
#pragma intrinsic(__lidt)
void __sidt(void *Destination);
#pragma intrinsic(__sidt)
/* PAUSE instruction (spin-wait hint). */
void _mm_pause(void);
#pragma intrinsic(_mm_pause)
/* NOTE(review): closing #endif restored — lost in extraction. */
#endif
391 #endif /* KJK_INTRIN_H_ */