#ifndef KJK_INTRIN_H_
#define KJK_INTRIN_H_

#ifdef __cplusplus
extern "C" {
#endif

/*** Stack frame juggling ***/
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)
void * _AddressOfReturnAddress(void);
#pragma intrinsic(_AddressOfReturnAddress)
#if defined(_M_IX86) || defined(_M_AMD64)
unsigned int __getcallerseflags(void);
#pragma intrinsic(__getcallerseflags)
#endif
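
/* Usage sketch (illustrative, not part of the header): _ReturnAddress()
 * yields the address the current function will return to, handy for
 * caller tracing. DbgPrint and its format string are assumptions here. */
#if 0
void TraceCaller(void)
{
    void * Caller = _ReturnAddress(); /* address we will return to */
    DbgPrint("called from %p\n", Caller);
}
#endif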

/*** Memory barriers ***/
void _ReadWriteBarrier(void);
#pragma intrinsic(_ReadWriteBarrier)
void _ReadBarrier(void);
#pragma intrinsic(_ReadBarrier)
void _WriteBarrier(void);
#pragma intrinsic(_WriteBarrier)
#if defined(_M_IX86) || defined(_M_AMD64)
void _mm_mfence(void);
#pragma intrinsic(_mm_mfence)
void _mm_lfence(void);
#pragma intrinsic(_mm_lfence)
void _mm_sfence(void);
#pragma intrinsic(_mm_sfence)
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
#endif
#if defined(_M_AMD64)
void __faststorefence(void);
#pragma intrinsic(__faststorefence)
#elif defined(_M_ARM)
__int16 __iso_volatile_load16(const volatile __int16 *);
#pragma intrinsic(__iso_volatile_load16)
__int32 __iso_volatile_load32(const volatile __int32 *);
#pragma intrinsic(__iso_volatile_load32)
__int64 __iso_volatile_load64(const volatile __int64 *);
#pragma intrinsic(__iso_volatile_load64)
__int8 __iso_volatile_load8(const volatile __int8 *);
#pragma intrinsic(__iso_volatile_load8)
void __iso_volatile_store16(volatile __int16 *, __int16);
#pragma intrinsic(__iso_volatile_store16)
void __iso_volatile_store32(volatile __int32 *, __int32);
#pragma intrinsic(__iso_volatile_store32)
void __iso_volatile_store64(volatile __int64 *, __int64);
#pragma intrinsic(__iso_volatile_store64)
void __iso_volatile_store8(volatile __int8 *, __int8);
#pragma intrinsic(__iso_volatile_store8)
#endif
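
/* Usage sketch (illustrative): publish a payload, then set a flag another
 * thread polls. _ReadWriteBarrier() stops compiler reordering only;
 * _mm_sfence() orders the stores at the CPU level. Data and Ready are
 * hypothetical globals. */
#if 0
void PublishData(int Value)
{
    Data = Value;        /* hypothetical payload */
    _ReadWriteBarrier(); /* compiler: keep the stores in this order */
    _mm_sfence();        /* CPU: drain stores before the flag is set */
    Ready = 1;           /* hypothetical flag */
}
#endif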

/*** Atomic operations ***/
long _InterlockedCompareExchange(_Interlocked_operand_ volatile long * Destination, long Exchange, long Comparand);
#pragma intrinsic(_InterlockedCompareExchange)
char _InterlockedCompareExchange8(_Interlocked_operand_ char volatile * Destination, char Exchange, char Comparand);
#pragma intrinsic(_InterlockedCompareExchange8)
short _InterlockedCompareExchange16(_Interlocked_operand_ short volatile * Destination, short Exchange, short Comparand);
#pragma intrinsic(_InterlockedCompareExchange16)
__int64 _InterlockedCompareExchange64(_Interlocked_operand_ volatile __int64 * Destination, __int64 Exchange, __int64 Comparand);
#pragma intrinsic(_InterlockedCompareExchange64)
long _InterlockedExchange(_Interlocked_operand_ volatile long * Target, long Value);
#pragma intrinsic(_InterlockedExchange)
char _InterlockedExchange8(_Interlocked_operand_ char volatile * Target, char Value);
#pragma intrinsic(_InterlockedExchange8)
short _InterlockedExchange16(_Interlocked_operand_ short volatile * Target, short Value);
#pragma intrinsic(_InterlockedExchange16)
long _InterlockedExchangeAdd(_Interlocked_operand_ volatile long * Addend, long Value);
#pragma intrinsic(_InterlockedExchangeAdd)
char _InterlockedExchangeAdd8(_Interlocked_operand_ char volatile * Addend, char Value);
#pragma intrinsic(_InterlockedExchangeAdd8)
short _InterlockedExchangeAdd16(_Interlocked_operand_ short volatile * Addend, short Value);
#pragma intrinsic(_InterlockedExchangeAdd16)
char _InterlockedAnd8(_Interlocked_operand_ volatile char * value, char mask);
#pragma intrinsic(_InterlockedAnd8)
short _InterlockedAnd16(_Interlocked_operand_ volatile short * value, short mask);
#pragma intrinsic(_InterlockedAnd16)
long _InterlockedAnd(_Interlocked_operand_ volatile long * value, long mask);
#pragma intrinsic(_InterlockedAnd)
char _InterlockedOr8(_Interlocked_operand_ volatile char * value, char mask);
#pragma intrinsic(_InterlockedOr8)
short _InterlockedOr16(_Interlocked_operand_ volatile short * value, short mask);
#pragma intrinsic(_InterlockedOr16)
long _InterlockedOr(_Interlocked_operand_ volatile long * value, long mask);
#pragma intrinsic(_InterlockedOr)
char _InterlockedXor8(_Interlocked_operand_ volatile char * value, char mask);
#pragma intrinsic(_InterlockedXor8)
short _InterlockedXor16(_Interlocked_operand_ volatile short * value, short mask);
#pragma intrinsic(_InterlockedXor16)
long _InterlockedXor(_Interlocked_operand_ volatile long * value, long mask);
#pragma intrinsic(_InterlockedXor)
long _InterlockedDecrement(_Interlocked_operand_ volatile long * lpAddend);
#pragma intrinsic(_InterlockedDecrement)
long _InterlockedIncrement(_Interlocked_operand_ volatile long * lpAddend);
#pragma intrinsic(_InterlockedIncrement)
short _InterlockedDecrement16(_Interlocked_operand_ volatile short * lpAddend);
#pragma intrinsic(_InterlockedDecrement16)
short _InterlockedIncrement16(_Interlocked_operand_ volatile short * lpAddend);
#pragma intrinsic(_InterlockedIncrement16)
unsigned char _interlockedbittestandreset(volatile long * a, long b);
#pragma intrinsic(_interlockedbittestandreset)
unsigned char _interlockedbittestandset(volatile long * a, long b);
#pragma intrinsic(_interlockedbittestandset)
#if defined(_M_IX86)
long _InterlockedAddLargeStatistic(_Interlocked_operand_ volatile __int64 * Addend, long Value);
#pragma intrinsic(_InterlockedAddLargeStatistic)
#elif defined(_M_AMD64)
__int64 _InterlockedExchange64(volatile __int64 * Target, __int64 Value);
#pragma intrinsic(_InterlockedExchange64)
__int64 _InterlockedExchangeAdd64(volatile __int64 * Addend, __int64 Value);
#pragma intrinsic(_InterlockedExchangeAdd64)
void * _InterlockedCompareExchangePointer(void * volatile * Destination, void * Exchange, void * Comparand);
#pragma intrinsic(_InterlockedCompareExchangePointer)
void * _InterlockedExchangePointer(void * volatile * Target, void * Value);
#pragma intrinsic(_InterlockedExchangePointer)
unsigned char _InterlockedCompareExchange128(_Interlocked_operand_ volatile __int64 * Destination, __int64 ExchangeHigh, __int64 ExchangeLow, __int64 * ComparandResult);
#pragma intrinsic(_InterlockedCompareExchange128)
__int64 _InterlockedAnd64(volatile __int64 * value, __int64 mask);
#pragma intrinsic(_InterlockedAnd64)
__int64 _InterlockedOr64(volatile __int64 * value, __int64 mask);
#pragma intrinsic(_InterlockedOr64)
__int64 _InterlockedDecrement64(volatile __int64 * lpAddend);
#pragma intrinsic(_InterlockedDecrement64)
__int64 _InterlockedIncrement64(volatile __int64 * lpAddend);
#pragma intrinsic(_InterlockedIncrement64)
unsigned char _interlockedbittestandreset64(volatile __int64 * a, __int64 b);
#pragma intrinsic(_interlockedbittestandreset64)
unsigned char _interlockedbittestandset64(volatile __int64 * a, __int64 b);
#pragma intrinsic(_interlockedbittestandset64)
long _InterlockedAnd_np(volatile long * Value, long Mask);
#pragma intrinsic(_InterlockedAnd_np)
char _InterlockedAnd8_np(volatile char * Value, char Mask);
#pragma intrinsic(_InterlockedAnd8_np)
short _InterlockedAnd16_np(volatile short * Value, short Mask);
#pragma intrinsic(_InterlockedAnd16_np)
__int64 _InterlockedAnd64_np(volatile __int64 * Value, __int64 Mask);
#pragma intrinsic(_InterlockedAnd64_np)
short _InterlockedCompareExchange16_np(volatile short * Destination, short Exchange, short Comparand);
#pragma intrinsic(_InterlockedCompareExchange16_np)
__int64 _InterlockedCompareExchange64_np(volatile __int64 * Destination, __int64 Exchange, __int64 Comparand);
#pragma intrinsic(_InterlockedCompareExchange64_np)
unsigned char _InterlockedCompareExchange128_np(volatile __int64 * Destination, __int64 ExchangeHigh, __int64 ExchangeLow, __int64 * ComparandResult);
#pragma intrinsic(_InterlockedCompareExchange128_np)
void * _InterlockedCompareExchangePointer_np(void * volatile * Destination, void * Exchange, void * Comparand);
#pragma intrinsic(_InterlockedCompareExchangePointer_np)
long _InterlockedCompareExchange_np(volatile long * Destination, long Exchange, long Comparand);
#pragma intrinsic(_InterlockedCompareExchange_np)
short _InterlockedOr16_np(volatile short * Value, short Mask);
#pragma intrinsic(_InterlockedOr16_np)
char _InterlockedOr8_np(volatile char * Value, char Mask);
#pragma intrinsic(_InterlockedOr8_np)
long _InterlockedOr_np(volatile long * Value, long Mask);
#pragma intrinsic(_InterlockedOr_np)
short _InterlockedXor16_np(volatile short * Value, short Mask);
#pragma intrinsic(_InterlockedXor16_np)
__int64 _InterlockedXor64_np(volatile __int64 * Value, __int64 Mask);
#pragma intrinsic(_InterlockedXor64_np)
char _InterlockedXor8_np(volatile char * Value, char Mask);
#pragma intrinsic(_InterlockedXor8_np)
long _InterlockedXor_np(volatile long * Value, long Mask);
#pragma intrinsic(_InterlockedXor_np)
__int64 _InterlockedOr64_np(volatile __int64 * Value, __int64 Mask);
#pragma intrinsic(_InterlockedOr64_np)
#elif defined(_M_ARM)
long _InterlockedAdd(_Interlocked_operand_ long volatile * Addend, long Value);
__int64 _InterlockedAdd64(_Interlocked_operand_ __int64 volatile * Addend, __int64 Value);
__int64 _InterlockedAdd64_acq(__int64 volatile * Addend, __int64 Value);
__int64 _InterlockedAdd64_nf(__int64 volatile * Addend, __int64 Value);
__int64 _InterlockedAdd64_rel(__int64 volatile * Addend, __int64 Value);
long _InterlockedAdd_acq(long volatile * Addend, long Value);
long _InterlockedAdd_nf(long volatile * Addend, long Value);
long _InterlockedAdd_rel(long volatile * Addend, long Value);
short _InterlockedAnd16_acq(short volatile * Value, short Mask);
short _InterlockedAnd16_nf(short volatile * Value, short Mask);
short _InterlockedAnd16_rel(short volatile * Value, short Mask);
__int64 _InterlockedAnd64_acq(__int64 volatile * Value, __int64 Mask);
__int64 _InterlockedAnd64_nf(__int64 volatile * Value, __int64 Mask);
__int64 _InterlockedAnd64_rel(__int64 volatile * Value, __int64 Mask);
char _InterlockedAnd8_acq(char volatile * Value, char Mask);
char _InterlockedAnd8_nf(char volatile * Value, char Mask);
char _InterlockedAnd8_rel(char volatile * Value, char Mask);
long _InterlockedAnd_acq(long volatile * Value, long Mask);
long _InterlockedAnd_nf(long volatile * Value, long Mask);
long _InterlockedAnd_rel(long volatile * Value, long Mask);
short _InterlockedCompareExchange16_acq(short volatile * Destination, short Exchange, short Comparand);
short _InterlockedCompareExchange16_nf(short volatile * Destination, short Exchange, short Comparand);
short _InterlockedCompareExchange16_rel(short volatile * Destination, short Exchange, short Comparand);
__int64 _InterlockedCompareExchange64_acq(__int64 volatile * Destination, __int64 Exchange, __int64 Comparand);
__int64 _InterlockedCompareExchange64_nf(__int64 volatile * Destination, __int64 Exchange, __int64 Comparand);
__int64 _InterlockedCompareExchange64_rel(__int64 volatile * Destination, __int64 Exchange, __int64 Comparand);
char _InterlockedCompareExchange8_acq(char volatile * Destination, char Exchange, char Comparand);
char _InterlockedCompareExchange8_nf(char volatile * Destination, char Exchange, char Comparand);
char _InterlockedCompareExchange8_rel(char volatile * Destination, char Exchange, char Comparand);
void * _InterlockedCompareExchangePointer_acq(void * volatile * Destination, void * Exchange, void * Comparand);
void * _InterlockedCompareExchangePointer_nf(void * volatile * Destination, void * Exchange, void * Comparand);
void * _InterlockedCompareExchangePointer_rel(void * volatile * Destination, void * Exchange, void * Comparand);
long _InterlockedCompareExchange_acq(long volatile * Destination, long Exchange, long Comparand);
long _InterlockedCompareExchange_nf(long volatile * Destination, long Exchange, long Comparand);
long _InterlockedCompareExchange_rel(long volatile * Destination, long Exchange, long Comparand);
short _InterlockedDecrement16_acq(short volatile * Addend);
short _InterlockedDecrement16_nf(short volatile * Addend);
short _InterlockedDecrement16_rel(short volatile * Addend);
__int64 _InterlockedDecrement64_acq(__int64 volatile * Addend);
__int64 _InterlockedDecrement64_nf(__int64 volatile * Addend);
__int64 _InterlockedDecrement64_rel(__int64 volatile * Addend);
long _InterlockedDecrement_acq(long volatile * Addend);
long _InterlockedDecrement_nf(long volatile * Addend);
long _InterlockedDecrement_rel(long volatile * Addend);
short _InterlockedExchange16_acq(short volatile * Target, short Value);
short _InterlockedExchange16_nf(short volatile * Target, short Value);
__int64 _InterlockedExchange64_acq(__int64 volatile * Target, __int64 Value);
__int64 _InterlockedExchange64_nf(__int64 volatile * Target, __int64 Value);
char _InterlockedExchange8_acq(char volatile * Target, char Value);
char _InterlockedExchange8_nf(char volatile * Target, char Value);
short _InterlockedExchangeAdd16_acq(short volatile * Addend, short Value);
short _InterlockedExchangeAdd16_nf(short volatile * Addend, short Value);
short _InterlockedExchangeAdd16_rel(short volatile * Addend, short Value);
__int64 _InterlockedExchangeAdd64_acq(__int64 volatile * Addend, __int64 Value);
__int64 _InterlockedExchangeAdd64_nf(__int64 volatile * Addend, __int64 Value);
__int64 _InterlockedExchangeAdd64_rel(__int64 volatile * Addend, __int64 Value);
char _InterlockedExchangeAdd8_acq(char volatile * Addend, char Value);
char _InterlockedExchangeAdd8_nf(char volatile * Addend, char Value);
char _InterlockedExchangeAdd8_rel(char volatile * Addend, char Value);
long _InterlockedExchangeAdd_acq(long volatile * Addend, long Value);
long _InterlockedExchangeAdd_nf(long volatile * Addend, long Value);
long _InterlockedExchangeAdd_rel(long volatile * Addend, long Value);
void * _InterlockedExchangePointer_acq(void * volatile * Target, void * Value);
void * _InterlockedExchangePointer_nf(void * volatile * Target, void * Value);
long _InterlockedExchange_acq(long volatile * Target, long Value);
long _InterlockedExchange_nf(long volatile * Target, long Value);
short _InterlockedIncrement16_acq(short volatile * Addend);
short _InterlockedIncrement16_nf(short volatile * Addend);
short _InterlockedIncrement16_rel(short volatile * Addend);
__int64 _InterlockedIncrement64_acq(__int64 volatile * Addend);
__int64 _InterlockedIncrement64_nf(__int64 volatile * Addend);
__int64 _InterlockedIncrement64_rel(__int64 volatile * Addend);
long _InterlockedIncrement_acq(long volatile * Addend);
long _InterlockedIncrement_nf(long volatile * Addend);
long _InterlockedIncrement_rel(long volatile * Addend);
short _InterlockedOr16_acq(short volatile * Value, short Mask);
short _InterlockedOr16_nf(short volatile * Value, short Mask);
short _InterlockedOr16_rel(short volatile * Value, short Mask);
__int64 _InterlockedOr64_acq(__int64 volatile * Value, __int64 Mask);
__int64 _InterlockedOr64_nf(__int64 volatile * Value, __int64 Mask);
__int64 _InterlockedOr64_rel(__int64 volatile * Value, __int64 Mask);
char _InterlockedOr8_acq(char volatile * Value, char Mask);
char _InterlockedOr8_nf(char volatile * Value, char Mask);
char _InterlockedOr8_rel(char volatile * Value, char Mask);
long _InterlockedOr_acq(long volatile * Value, long Mask);
long _InterlockedOr_nf(long volatile * Value, long Mask);
long _InterlockedOr_rel(long volatile * Value, long Mask);
short _InterlockedXor16_acq(short volatile * Value, short Mask);
short _InterlockedXor16_nf(short volatile * Value, short Mask);
short _InterlockedXor16_rel(short volatile * Value, short Mask);
__int64 _InterlockedXor64_acq(__int64 volatile * Value, __int64 Mask);
__int64 _InterlockedXor64_nf(__int64 volatile * Value, __int64 Mask);
__int64 _InterlockedXor64_rel(_Interlocked_operand_ __int64 volatile * Value, __int64 Mask);
char _InterlockedXor8_acq(char volatile * Value, char Mask);
char _InterlockedXor8_nf(char volatile * Value, char Mask);
char _InterlockedXor8_rel(char volatile * Value, char Mask);
long _InterlockedXor_acq(long volatile * Value, long Mask);
long _InterlockedXor_nf(long volatile * Value, long Mask);
long _InterlockedXor_rel(long volatile * Value, long Mask);
unsigned char _interlockedbittestandreset_acq(long volatile *, long);
unsigned char _interlockedbittestandreset_nf(long volatile *, long);
unsigned char _interlockedbittestandreset_rel(long volatile *, long);
unsigned char _interlockedbittestandset_acq(long volatile *, long);
unsigned char _interlockedbittestandset_nf(long volatile *, long);
unsigned char _interlockedbittestandset_rel(long volatile *, long);
#endif
#if defined(_M_AMD64) || defined(_M_ARM)
__int64 _InterlockedAnd64(_Interlocked_operand_ __int64 volatile * Value, __int64 Mask);
void * _InterlockedCompareExchangePointer(_Interlocked_operand_ void * volatile * Destination, void * Exchange, void * Comparand);
__int64 _InterlockedDecrement64(_Interlocked_operand_ __int64 volatile * Addend);
__int64 _InterlockedExchange64(_Interlocked_operand_ __int64 volatile * Target, __int64 Value);
__int64 _InterlockedExchangeAdd64(_Interlocked_operand_ __int64 volatile * Addend, __int64 Value);
void * _InterlockedExchangePointer(_Interlocked_operand_ void * volatile * Target, void * Value);
__int64 _InterlockedIncrement64(_Interlocked_operand_ __int64 volatile * Addend);
__int64 _InterlockedOr64(_Interlocked_operand_ __int64 volatile * Value, __int64 Mask);
__int64 _InterlockedXor64(_Interlocked_operand_ __int64 volatile * Value, __int64 Mask);
#endif
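
/* Usage sketch (illustrative): the canonical compare-exchange retry loop,
 * here incrementing a shared counter but clamping it at Limit. */
#if 0
long SaturatingIncrement(volatile long * Counter, long Limit)
{
    long Old, New;
    do
    {
        Old = *Counter;
        if (Old >= Limit)
            return Old; /* already saturated */
        New = Old + 1;
        /* retry if another thread modified *Counter in between */
    } while (_InterlockedCompareExchange(Counter, New, Old) != Old);
    return New;
}
#endif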

/*** String operations ***/
#if defined(_M_IX86) || defined(_M_AMD64)
void __stosb(unsigned char * Dest, unsigned char Data, size_t Count);
#pragma intrinsic(__stosb)
void __stosw(unsigned short * Dest, unsigned short Data, size_t Count);
#pragma intrinsic(__stosw)
void __stosd(unsigned long * Dest, unsigned long Data, size_t Count);
#pragma intrinsic(__stosd)
void __movsb(unsigned char * Destination, unsigned char const * Source, size_t Count);
#pragma intrinsic(__movsb)
void __movsw(unsigned short * Destination, unsigned short const * Source, size_t Count);
#pragma intrinsic(__movsw)
void __movsd(unsigned long * Destination, unsigned long const * Source, size_t Count);
#pragma intrinsic(__movsd)
#endif
#ifdef _M_AMD64
void __stosq(unsigned __int64 * Dest, unsigned __int64 Data, size_t Count);
#pragma intrinsic(__stosq)
void __movsq(unsigned __int64 * Destination, unsigned __int64 const * Source, size_t Count);
#pragma intrinsic(__movsq)
#endif
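
/* Usage sketch (illustrative): __stosb and __movsb emit rep stosb /
 * rep movsb, i.e. byte-granular memset- and memcpy-style operations. */
#if 0
void ZeroThenCopy(unsigned char * Dst, unsigned char const * Src, size_t Len)
{
    __stosb(Dst, 0, Len);   /* fill Dst with Len zero bytes */
    __movsb(Dst, Src, Len); /* copy Len bytes from Src to Dst */
}
#endif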

/*** GS segment addressing ***/
#if defined(_M_AMD64)
void __writegsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__writegsbyte)
void __writegsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__writegsword)
void __writegsdword(unsigned long Offset, unsigned long Data);
#pragma intrinsic(__writegsdword)
void __writegsqword(unsigned long Offset, unsigned __int64 Data);
#pragma intrinsic(__writegsqword)
unsigned char __readgsbyte(unsigned long Offset);
#pragma intrinsic(__readgsbyte)
unsigned short __readgsword(unsigned long Offset);
#pragma intrinsic(__readgsword)
unsigned long __readgsdword(unsigned long Offset);
#pragma intrinsic(__readgsdword)
unsigned __int64 __readgsqword(unsigned long Offset);
#pragma intrinsic(__readgsqword)
void __incgsbyte(unsigned long Offset);
#pragma intrinsic(__incgsbyte)
void __incgsword(unsigned long Offset);
#pragma intrinsic(__incgsword)
void __incgsdword(unsigned long Offset);
#pragma intrinsic(__incgsdword)
void __incgsqword(unsigned long Offset);
#pragma intrinsic(__incgsqword)
void __addgsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__addgsbyte)
void __addgsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__addgsword)
void __addgsdword(unsigned long Offset, unsigned long Data);
#pragma intrinsic(__addgsdword)
void __addgsqword(unsigned long Offset, unsigned __int64 Data);
#pragma intrinsic(__addgsqword)
#endif
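
/* Usage sketch (illustrative): on x64, gs: addresses the TEB in user mode,
 * and NT_TIB keeps a pointer to the TEB itself at offset 0x30, which is
 * what NtCurrentTeb() reads. */
#if 0
void * GetCurrentTebPointer(void)
{
    return (void *)__readgsqword(0x30); /* NT_TIB.Self on x64 */
}
#endif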

/*** FS segment addressing ***/
#if defined(_M_IX86)
void __writefsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__writefsbyte)
void __writefsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__writefsword)
void __writefsdword(unsigned long Offset, unsigned long Data);
#pragma intrinsic(__writefsdword)
void __writefsqword(unsigned long Offset, unsigned __int64 Data);
#pragma intrinsic(__writefsqword)
unsigned char __readfsbyte(unsigned long Offset);
#pragma intrinsic(__readfsbyte)
unsigned short __readfsword(unsigned long Offset);
#pragma intrinsic(__readfsword)
unsigned long __readfsdword(unsigned long Offset);
#pragma intrinsic(__readfsdword)
void __incfsbyte(unsigned long Offset);
#pragma intrinsic(__incfsbyte)
void __incfsword(unsigned long Offset);
#pragma intrinsic(__incfsword)
void __incfsdword(unsigned long Offset);
#pragma intrinsic(__incfsdword)
void __addfsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__addfsbyte)
void __addfsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__addfsword)
void __addfsdword(unsigned long Offset, unsigned long Data);
#pragma intrinsic(__addfsdword)
#endif
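
/* Usage sketch (illustrative): the x86 counterpart of the gs: example
 * above; fs: addresses the TEB and NT_TIB.Self sits at offset 0x18. */
#if 0
void * GetCurrentTebPointer(void)
{
    return (void *)__readfsdword(0x18); /* NT_TIB.Self on x86 */
}
#endif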

/*** Bit manipulation ***/
unsigned char _BitScanForward(unsigned long * Index, unsigned long Mask);
#pragma intrinsic(_BitScanForward)
unsigned char _BitScanReverse(unsigned long * Index, unsigned long Mask);
#pragma intrinsic(_BitScanReverse)
#ifdef _WIN64
unsigned char _BitScanForward64(unsigned long * Index, unsigned long long Mask);
#pragma intrinsic(_BitScanForward64)
unsigned char _BitScanReverse64(unsigned long * Index, unsigned long long Mask);
#pragma intrinsic(_BitScanReverse64)
#endif
unsigned char _bittest(const long * a, long b);
#pragma intrinsic(_bittest)
unsigned char _bittestandcomplement(long * a, long b);
#pragma intrinsic(_bittestandcomplement)
unsigned char _bittestandreset(long * a, long b);
#pragma intrinsic(_bittestandreset)
unsigned char _bittestandset(long * a, long b);
#pragma intrinsic(_bittestandset)
unsigned char _rotl8(unsigned char value, unsigned char shift);
#pragma intrinsic(_rotl8)
unsigned short _rotl16(unsigned short value, unsigned char shift);
#pragma intrinsic(_rotl16)
_Check_return_ unsigned int _rotl(unsigned int value, int shift);
#pragma intrinsic(_rotl)
_Check_return_ unsigned __int64 __cdecl _rotl64(_In_ unsigned __int64 Value, _In_ int Shift);
#pragma intrinsic(_rotl64)
_Check_return_ unsigned long __cdecl _lrotl(_In_ unsigned long, _In_ int);
#pragma intrinsic(_lrotl)
unsigned char _rotr8(unsigned char value, unsigned char shift);
#pragma intrinsic(_rotr8)
unsigned short _rotr16(unsigned short value, unsigned char shift);
#pragma intrinsic(_rotr16)
_Check_return_ unsigned int _rotr(unsigned int value, int shift);
#pragma intrinsic(_rotr)
_Check_return_ unsigned __int64 __cdecl _rotr64(_In_ unsigned __int64 Value, _In_ int Shift);
#pragma intrinsic(_rotr64)
_Check_return_ unsigned long __cdecl _lrotr(_In_ unsigned long, _In_ int);
#pragma intrinsic(_lrotr)
unsigned short _byteswap_ushort(unsigned short value);
#pragma intrinsic(_byteswap_ushort)
unsigned long _byteswap_ulong(unsigned long value);
#pragma intrinsic(_byteswap_ulong)
unsigned __int64 _byteswap_uint64(unsigned __int64 value);
#pragma intrinsic(_byteswap_uint64)
#if defined(_M_IX86) || defined(_M_AMD64)
unsigned __int64 __ll_lshift(unsigned __int64 Mask, int Bit);
#pragma intrinsic(__ll_lshift)
__int64 __ll_rshift(__int64 Mask, int Bit);
#pragma intrinsic(__ll_rshift)
unsigned __int64 __ull_rshift(unsigned __int64 Mask, int Bit);
#pragma intrinsic(__ull_rshift)
unsigned int __lzcnt(unsigned int Value);
#pragma intrinsic(__lzcnt)
unsigned short __lzcnt16(unsigned short Value);
#pragma intrinsic(__lzcnt16)
unsigned int __popcnt(unsigned int Value);
#pragma intrinsic(__popcnt)
unsigned short __popcnt16(unsigned short Value);
#pragma intrinsic(__popcnt16)
#endif
#ifdef _M_AMD64
unsigned __int64 __shiftleft128(unsigned __int64 LowPart, unsigned __int64 HighPart, unsigned char Shift);
#pragma intrinsic(__shiftleft128)
unsigned __int64 __shiftright128(unsigned __int64 LowPart, unsigned __int64 HighPart, unsigned char Shift);
#pragma intrinsic(__shiftright128)
unsigned char _bittest64(__int64 const *a, __int64 b);
#pragma intrinsic(_bittest64)
unsigned char _bittestandcomplement64(__int64 *a, __int64 b);
#pragma intrinsic(_bittestandcomplement64)
unsigned char _bittestandreset64(__int64 *a, __int64 b);
#pragma intrinsic(_bittestandreset64)
unsigned char _bittestandset64(__int64 *a, __int64 b);
#pragma intrinsic(_bittestandset64)
unsigned __int64 __lzcnt64(unsigned __int64 Value);
#pragma intrinsic(__lzcnt64)
unsigned __int64 __popcnt64(unsigned __int64 Value);
#pragma intrinsic(__popcnt64)
#elif defined(_M_ARM)
unsigned int _CountLeadingOnes(unsigned long Value);
unsigned int _CountLeadingOnes64(unsigned __int64 Value);
unsigned int _CountLeadingSigns(long Value);
unsigned int _CountLeadingSigns64(__int64 Value);
unsigned int _CountLeadingZeros(unsigned long Value);
unsigned int _CountLeadingZeros64(unsigned __int64 Value);
unsigned int _CountOneBits(unsigned long Value);
unsigned int _CountOneBits64(unsigned __int64 Value);
#endif
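
/* Usage sketch (illustrative): _BitScanForward returns 0 when Mask has no
 * bit set and leaves Index undefined, so the result must be checked. */
#if 0
long LowestSetBit(unsigned long Mask)
{
    unsigned long Index;
    if (!_BitScanForward(&Index, Mask))
        return -1;      /* no bit set */
    return (long)Index; /* 0-based index of the lowest set bit */
}
#endif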

/*** 64/128-bit math ***/
__int64 __cdecl _abs64(__int64);
#pragma intrinsic(_abs64)
#if defined(_M_IX86) || defined(_M_AMD64)
__int64 __emul(int a, int b);
#pragma intrinsic(__emul)
unsigned __int64 __emulu(unsigned int a, unsigned int b);
#pragma intrinsic(__emulu)
#endif
#ifdef _M_AMD64
__int64 __mulh(__int64 a, __int64 b);
#pragma intrinsic(__mulh)
unsigned __int64 __umulh(unsigned __int64 a, unsigned __int64 b);
#pragma intrinsic(__umulh)
__int64 _mul128(__int64 Multiplier, __int64 Multiplicand, __int64 * HighProduct);
#pragma intrinsic(_mul128)
unsigned __int64 _umul128(unsigned __int64 Multiplier, unsigned __int64 Multiplicand, unsigned __int64 * HighProduct);
#pragma intrinsic(_umul128)
#elif defined(_M_ARM)
long _MulHigh(long Multiplier, long Multiplicand);
#pragma intrinsic(_MulHigh)
unsigned long _MulUnsignedHigh(unsigned long Multiplier, unsigned long Multiplicand);
#pragma intrinsic(_MulUnsignedHigh)
#endif
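
/* Usage sketch (illustrative, AMD64 only): _umul128 returns the low half of
 * the full 128-bit product and stores the high half through HighProduct. */
#if 0
unsigned __int64 SaturatingMul64(unsigned __int64 A, unsigned __int64 B)
{
    unsigned __int64 High;
    unsigned __int64 Low = _umul128(A, B, &High);
    /* High:Low now holds the exact 128-bit product A * B */
    return High ? ~0ui64 : Low; /* clamp to UINT64_MAX on overflow */
}
#endif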

/** Floating point stuff **/
#if defined(_M_ARM)
int _isunordered(double arg1, double arg2);
#pragma intrinsic(_isunordered)
int _isunorderedf(float arg1, float arg2);
#pragma intrinsic(_isunorderedf)
double _CopyDoubleFromInt64(__int64);
#pragma intrinsic(_CopyDoubleFromInt64)
float _CopyFloatFromInt32(__int32);
#pragma intrinsic(_CopyFloatFromInt32)
__int32 _CopyInt32FromFloat(float);
#pragma intrinsic(_CopyInt32FromFloat)
__int64 _CopyInt64FromDouble(double);
#pragma intrinsic(_CopyInt64FromDouble)
#endif

/*** Port I/O ***/
#if defined(_M_IX86) || defined(_M_AMD64)
unsigned char __inbyte(unsigned short Port);
#pragma intrinsic(__inbyte)
unsigned short __inword(unsigned short Port);
#pragma intrinsic(__inword)
unsigned long __indword(unsigned short Port);
#pragma intrinsic(__indword)
void __inbytestring(unsigned short Port, unsigned char * Buffer, unsigned long Count);
#pragma intrinsic(__inbytestring)
void __inwordstring(unsigned short Port, unsigned short * Buffer, unsigned long Count);
#pragma intrinsic(__inwordstring)
void __indwordstring(unsigned short Port, unsigned long * Buffer, unsigned long Count);
#pragma intrinsic(__indwordstring)
void __outbyte(unsigned short Port, unsigned char Data);
#pragma intrinsic(__outbyte)
void __outword(unsigned short Port, unsigned short Data);
#pragma intrinsic(__outword)
void __outdword(unsigned short Port, unsigned long Data);
#pragma intrinsic(__outdword)
void __outbytestring(unsigned short Port, unsigned char * Buffer, unsigned long Count);
#pragma intrinsic(__outbytestring)
void __outwordstring(unsigned short Port, unsigned short * Buffer, unsigned long Count);
#pragma intrinsic(__outwordstring)
void __outdwordstring(unsigned short Port, unsigned long * Buffer, unsigned long Count);
#pragma intrinsic(__outdwordstring)
int __cdecl _inp(unsigned short Port);
#pragma intrinsic(_inp)
unsigned long __cdecl _inpd(unsigned short Port);
#pragma intrinsic(_inpd)
unsigned short __cdecl _inpw(unsigned short Port);
#pragma intrinsic(_inpw)
int __cdecl inp(unsigned short Port);
#pragma intrinsic(inp)
unsigned long __cdecl inpd(unsigned short Port);
#pragma intrinsic(inpd)
unsigned short __cdecl inpw(unsigned short Port);
#pragma intrinsic(inpw)
int __cdecl _outp(unsigned short Port, int Value);
#pragma intrinsic(_outp)
unsigned long __cdecl _outpd(unsigned short Port, unsigned long Value);
#pragma intrinsic(_outpd)
unsigned short __cdecl _outpw(unsigned short Port, unsigned short Value);
#pragma intrinsic(_outpw)
int __cdecl outp(unsigned short Port, int Value);
#pragma intrinsic(outp)
unsigned long __cdecl outpd(unsigned short Port, unsigned long Value);
#pragma intrinsic(outpd)
unsigned short __cdecl outpw(unsigned short Port, unsigned short Value);
#pragma intrinsic(outpw)
#endif
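
/* Usage sketch (illustrative): byte-wide port I/O; ports 0x70/0x71 are the
 * traditional CMOS/RTC index and data registers. */
#if 0
unsigned char ReadCmosRegister(unsigned char Reg)
{
    __outbyte(0x70, Reg);  /* select the CMOS register */
    return __inbyte(0x71); /* read its value */
}
#endif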

/*** System information ***/
#if defined(_M_IX86) || defined(_M_AMD64)
void __cpuid(int CPUInfo[4], int InfoType);
#pragma intrinsic(__cpuid)
void __cpuidex(int CPUInfo[4], int InfoType, int ECXValue);
#pragma intrinsic(__cpuidex)
unsigned __int64 __rdtsc(void);
#pragma intrinsic(__rdtsc)
unsigned __int64 __rdtscp(unsigned int *);
#pragma intrinsic(__rdtscp)
void __writeeflags(uintptr_t Value);
#pragma intrinsic(__writeeflags)
uintptr_t __readeflags(void);
#pragma intrinsic(__readeflags)
#endif
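
/* Usage sketch (illustrative): CPUID leaf 0 returns the vendor string in
 * EBX, EDX, ECX order, i.e. CPUInfo[1], CPUInfo[3], CPUInfo[2]. */
#if 0
void GetCpuVendor(char Vendor[13])
{
    int CPUInfo[4];
    __cpuid(CPUInfo, 0);
    *(int *)&Vendor[0] = CPUInfo[1]; /* EBX */
    *(int *)&Vendor[4] = CPUInfo[3]; /* EDX */
    *(int *)&Vendor[8] = CPUInfo[2]; /* ECX */
    Vendor[12] = '\0';
}
#endif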

/*** Interrupts and traps ***/
void __debugbreak(void);
#pragma intrinsic(__debugbreak)
void _disable(void);
#pragma intrinsic(_disable)
void _enable(void);
#pragma intrinsic(_enable)
#if defined(_M_IX86) || defined(_M_AMD64)
void __int2c(void);
#pragma intrinsic(__int2c)
void __halt(void);
#pragma intrinsic(__halt)
void __ud2(void);
#pragma intrinsic(__ud2)
#if (_MSC_VER >= 1700)
__declspec(noreturn) void __fastfail(unsigned int Code);
#pragma intrinsic(__fastfail)
#else
/* Fallback for older compilers: raise the fail-fast interrupt (int 0x29)
   with the failure code in ecx, as the real intrinsic does. */
__declspec(noreturn) __forceinline
void __fastfail(unsigned int Code)
{
    __asm
    {
        mov ecx, Code
        int 29h
    }
}
#endif
#endif
#if defined(_M_ARM)
int __trap(int Arg1, ...);
#endif
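
/* Usage sketch (illustrative): masking interrupts around a device register
 * access. Kernel mode only; production code would raise IRQL instead of
 * toggling the interrupt flag directly. */
#if 0
void ProgramDeviceAtomically(void)
{
    _disable(); /* cli: mask maskable interrupts */
    /* ... touch hardware registers that must not be interrupted ... */
    _enable();  /* sti: unmask interrupts again */
}
#endif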

/*** Protected memory management ***/
#if defined(_M_IX86) || defined(_M_AMD64)
void __writecr0(uintptr_t Data);
#pragma intrinsic(__writecr0)
void __writecr3(uintptr_t Data);
#pragma intrinsic(__writecr3)
void __writecr4(uintptr_t Data);
#pragma intrinsic(__writecr4)
void __writecr8(uintptr_t Data);
#pragma intrinsic(__writecr8)
#endif
#if defined(_M_IX86)
unsigned long __readcr0(void);
#pragma intrinsic(__readcr0)
unsigned long __readcr2(void);
#pragma intrinsic(__readcr2)
unsigned long __readcr3(void);
#pragma intrinsic(__readcr3)
//unsigned long __readcr4(void);
//#pragma intrinsic(__readcr4)
// HACK: MSVC is broken
unsigned long ___readcr4(void);
#define __readcr4 ___readcr4
unsigned long __readcr8(void);
#pragma intrinsic(__readcr8)
unsigned int __readdr(unsigned int reg);
#pragma intrinsic(__readdr)
void __writedr(unsigned reg, unsigned int value);
#pragma intrinsic(__writedr)
// This intrinsic is broken and generates wrong opcodes
// when optimization is enabled!
#pragma warning(push)
#pragma warning(disable:4711)
void __forceinline __invlpg_fixed(void * Address)
{
    _ReadWriteBarrier();
    __asm
    {
        mov eax, Address
        invlpg [eax]
    }
    _ReadWriteBarrier();
}
#pragma warning(pop)
#define __invlpg __invlpg_fixed
#elif defined(_M_AMD64)
void __invlpg(void * Address);
#pragma intrinsic(__invlpg)
unsigned __int64 __readcr0(void);
#pragma intrinsic(__readcr0)
unsigned __int64 __readcr2(void);
#pragma intrinsic(__readcr2)
unsigned __int64 __readcr3(void);
#pragma intrinsic(__readcr3)
unsigned __int64 __readcr4(void);
#pragma intrinsic(__readcr4)
unsigned __int64 __readcr8(void);
#pragma intrinsic(__readcr8)
unsigned __int64 __readdr(unsigned int reg);
#pragma intrinsic(__readdr)
void __writedr(unsigned reg, unsigned __int64 value);
#pragma intrinsic(__writedr)
#elif defined(_M_ARM)
void __cdecl __prefetch(const void *);
#pragma intrinsic(__prefetch)
#endif
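
/* Usage sketch (illustrative): after rewriting a page-table entry, the
 * stale translation must be flushed. WritePte() is hypothetical. */
#if 0
void RemapPage(void * VirtualAddress, unsigned __int64 NewPte)
{
    WritePte(VirtualAddress, NewPte); /* hypothetical PTE update */
    __invlpg(VirtualAddress);         /* flush this page's TLB entry */
}
#endif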

/*** System operations ***/
#if defined(_M_IX86) || defined(_M_AMD64)
unsigned __int64 __readmsr(unsigned long reg);
#pragma intrinsic(__readmsr)
void __writemsr(unsigned long Register, unsigned __int64 Value);
#pragma intrinsic(__writemsr)
unsigned __int64 __readpmc(unsigned long counter);
#pragma intrinsic(__readpmc)
unsigned long __segmentlimit(unsigned long a);
#pragma intrinsic(__segmentlimit)
void __wbinvd(void);
#pragma intrinsic(__wbinvd)
void __lidt(void *Source);
#pragma intrinsic(__lidt)
void __sidt(void *Destination);
#pragma intrinsic(__sidt)
void _mm_pause(void);
#pragma intrinsic(_mm_pause)
#endif
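
/* Usage sketch (illustrative): _mm_pause in a spin-wait loop lowers power
 * draw and frees resources for the sibling hyper-thread. Flag is assumed
 * to be set by another processor. */
#if 0
void SpinUntilSet(long volatile * Flag)
{
    while (*Flag == 0)
        _mm_pause(); /* pause: polite busy-wait on x86/x64 */
}
#endif
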
#if defined(_M_ARM)
unsigned int _MoveFromCoprocessor(unsigned int, unsigned int, unsigned int, unsigned int, unsigned int);
#pragma intrinsic(_MoveFromCoprocessor)
unsigned int _MoveFromCoprocessor2(unsigned int, unsigned int, unsigned int, unsigned int, unsigned int);
#pragma intrinsic(_MoveFromCoprocessor2)
unsigned __int64 _MoveFromCoprocessor64(unsigned int, unsigned int, unsigned int);
#pragma intrinsic(_MoveFromCoprocessor64)
void _MoveToCoprocessor(unsigned int, unsigned int, unsigned int, unsigned int, unsigned int, unsigned int);
#pragma intrinsic(_MoveToCoprocessor)
void _MoveToCoprocessor2(unsigned int, unsigned int, unsigned int, unsigned int, unsigned int, unsigned int);
#pragma intrinsic(_MoveToCoprocessor2)
void _MoveToCoprocessor64(unsigned __int64, unsigned int, unsigned int, unsigned int);
#pragma intrinsic(_MoveToCoprocessor64)
int _ReadStatusReg(int);
#pragma intrinsic(_ReadStatusReg)
void _WriteStatusReg(int, int, int);
#pragma intrinsic(_WriteStatusReg)
void __yield(void);
#pragma intrinsic(__yield)
void __wfe(void);
#pragma intrinsic(__wfe)
void __wfi(void);
#pragma intrinsic(__wfi)
unsigned int __swi(unsigned int, ...);
#pragma intrinsic(__swi)
unsigned int __hvc(unsigned int, ...);
#pragma intrinsic(__hvc)
__int64 __ldrexd(__int64 volatile *);
#pragma intrinsic(__ldrexd)
unsigned __int64 __rdpmccntr64(void);
#pragma intrinsic(__rdpmccntr64)
void __sev(void);
#pragma intrinsic(__sev)
#endif

/** Secure virtual machine **/
#if defined(_M_IX86) || defined(_M_AMD64)
void __svm_clgi(void);
#pragma intrinsic(__svm_clgi)
void __svm_invlpga(void * Va, int Asid);
#pragma intrinsic(__svm_invlpga)
void __svm_skinit(int Slb);
#pragma intrinsic(__svm_skinit)
void __svm_stgi(void);
#pragma intrinsic(__svm_stgi)
void __svm_vmload(uintptr_t VmcbPhysicalAddress);
#pragma intrinsic(__svm_vmload)
void __svm_vmrun(uintptr_t VmcbPhysicalAddress);
#pragma intrinsic(__svm_vmrun)
void __svm_vmsave(uintptr_t VmcbPhysicalAddress);
#pragma intrinsic(__svm_vmsave)
#endif

/** Virtual machine extension **/
#if defined(_M_IX86) || defined(_M_AMD64)
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 * VmcsPhysicalAddress);
#endif
#if defined(_M_AMD64)
unsigned char __vmx_on(unsigned __int64 * VmsSupportPhysicalAddress);
unsigned char __vmx_vmclear(unsigned __int64 * VmcsPhysicalAddress);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 * VmcsPhysicalAddress);
unsigned char __vmx_vmread(size_t Field, size_t * FieldValue);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t Field, size_t FieldValue);
#endif

/** Misc **/
void __nop(void);
#pragma intrinsic(__nop)
#if (_MSC_VER >= 1700)
void __code_seg(const char *);
#pragma intrinsic(__code_seg)
#endif
#ifdef _M_ARM
int _AddSatInt(int, int);
#pragma intrinsic(_AddSatInt)
int _DAddSatInt(int, int);
#pragma intrinsic(_DAddSatInt)
int _DSubSatInt(int, int);
#pragma intrinsic(_DSubSatInt)
int _SubSatInt(int, int);
#pragma intrinsic(_SubSatInt)
void __emit(unsigned __int32);
#pragma intrinsic(__emit)
void __static_assert(int, const char *);
#pragma intrinsic(__static_assert)
#endif

#ifdef __cplusplus
}
#endif

#endif /* KJK_INTRIN_H_ */

/* EOF */