/*
 * MSVC compiler intrinsics declarations for the ReactOS SDK.
 * Path: reactos/sdk/include/crt/msc/intrin.h
 * (Part of the ReactOS source tree restructure moving headers under sdk/.)
 */
/* Guard against double inclusion: this header defines __forceinline
   functions further down, which would be redefined on a second pass. */
#pragma once

#ifdef __cplusplus
extern "C" {
#endif

/*** Stack frame juggling ***/
#pragma intrinsic(_ReturnAddress)
#pragma intrinsic(_AddressOfReturnAddress)
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__getcallerseflags)
#endif

/*** Memory barriers ***/
#pragma intrinsic(_ReadWriteBarrier)
#pragma intrinsic(_ReadBarrier)
#pragma intrinsic(_WriteBarrier)
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(_mm_mfence)
#pragma intrinsic(_mm_lfence)
#pragma intrinsic(_mm_sfence)
#endif
#if defined(_M_AMD64)
#pragma intrinsic(__faststorefence)
#elif defined(_M_ARM)
/* ARM acquire/release-free volatile accessors */
#pragma intrinsic(__iso_volatile_load16)
#pragma intrinsic(__iso_volatile_load32)
#pragma intrinsic(__iso_volatile_load64)
#pragma intrinsic(__iso_volatile_load8)
#pragma intrinsic(__iso_volatile_store16)
#pragma intrinsic(__iso_volatile_store32)
#pragma intrinsic(__iso_volatile_store64)
#pragma intrinsic(__iso_volatile_store8)
#endif

/*** Atomic operations ***/
#pragma intrinsic(_InterlockedCompareExchange)
#pragma intrinsic(_InterlockedCompareExchange8)
#pragma intrinsic(_InterlockedCompareExchange16)
#pragma intrinsic(_InterlockedCompareExchange64)
#pragma intrinsic(_InterlockedExchange)
#pragma intrinsic(_InterlockedExchange8)
#pragma intrinsic(_InterlockedExchange16)
#pragma intrinsic(_InterlockedExchangeAdd)
#pragma intrinsic(_InterlockedExchangeAdd8)
#pragma intrinsic(_InterlockedExchangeAdd16)
#pragma intrinsic(_InterlockedAnd8)
#pragma intrinsic(_InterlockedAnd16)
#pragma intrinsic(_InterlockedAnd)
#pragma intrinsic(_InterlockedOr8)
#pragma intrinsic(_InterlockedOr16)
#pragma intrinsic(_InterlockedOr)
#pragma intrinsic(_InterlockedXor8)
#pragma intrinsic(_InterlockedXor16)
#pragma intrinsic(_InterlockedXor)
#pragma intrinsic(_InterlockedDecrement)
#pragma intrinsic(_InterlockedIncrement)
#pragma intrinsic(_InterlockedDecrement16)
#pragma intrinsic(_InterlockedIncrement16)
#pragma intrinsic(_interlockedbittestandreset)
#pragma intrinsic(_interlockedbittestandset)
#if defined(_M_IX86)
#pragma intrinsic(_InterlockedAddLargeStatistic)
#elif defined(_M_AMD64)
/* 64-bit and pointer-sized variants available only on x64 */
#pragma intrinsic(_InterlockedExchange64)
#pragma intrinsic(_InterlockedExchangeAdd64)
#pragma intrinsic(_InterlockedCompareExchangePointer)
#pragma intrinsic(_InterlockedExchangePointer)
#pragma intrinsic(_InterlockedCompareExchange128)
#pragma intrinsic(_InterlockedAnd64)
#pragma intrinsic(_InterlockedOr64)
#pragma intrinsic(_InterlockedDecrement64)
#pragma intrinsic(_InterlockedIncrement64)
#pragma intrinsic(_interlockedbittestandreset64)
#pragma intrinsic(_interlockedbittestandset64)
/* _np ("no prefix") variants: non-lock-prefixed forms */
#pragma intrinsic(_InterlockedAnd_np)
#pragma intrinsic(_InterlockedAnd8_np)
#pragma intrinsic(_InterlockedAnd16_np)
#pragma intrinsic(_InterlockedAnd64_np)
#pragma intrinsic(_InterlockedCompareExchange16_np)
#pragma intrinsic(_InterlockedCompareExchange64_np)
#pragma intrinsic(_InterlockedCompareExchange128_np)
#pragma intrinsic(_InterlockedCompareExchangePointer_np)
#pragma intrinsic(_InterlockedCompareExchange_np)
#pragma intrinsic(_InterlockedOr16_np)
#pragma intrinsic(_InterlockedOr8_np)
#pragma intrinsic(_InterlockedOr_np)
#pragma intrinsic(_InterlockedXor16_np)
#pragma intrinsic(_InterlockedXor64_np)
#pragma intrinsic(_InterlockedXor8_np)
#pragma intrinsic(_InterlockedXor_np)
#pragma intrinsic(_InterlockedOr64_np)
#elif defined(_M_ARM)
/* TODO: ARM-specific interlocked intrinsics not yet listed */
#endif

/* Placeholder section, intentionally empty */
#if defined(_M_AMD64) || defined(_M_ARM)
#endif

/*** String operations ***/
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__stosb)
#pragma intrinsic(__stosw)
#pragma intrinsic(__stosd)
#pragma intrinsic(__movsb)
#pragma intrinsic(__movsw)
#pragma intrinsic(__movsd)
#endif
#ifdef _M_AMD64
#pragma intrinsic(__stosq)
#pragma intrinsic(__movsq)
#endif

/*** GS segment addressing (x64 TEB/PCR access) ***/
#if defined(_M_AMD64)
#pragma intrinsic(__writegsbyte)
#pragma intrinsic(__writegsword)
#pragma intrinsic(__writegsdword)
#pragma intrinsic(__writegsqword)
#pragma intrinsic(__readgsbyte)
#pragma intrinsic(__readgsword)
#pragma intrinsic(__readgsdword)
#pragma intrinsic(__readgsqword)
#pragma intrinsic(__incgsbyte)
#pragma intrinsic(__incgsword)
#pragma intrinsic(__incgsdword)
#pragma intrinsic(__incgsqword)
#pragma intrinsic(__addgsbyte)
#pragma intrinsic(__addgsword)
#pragma intrinsic(__addgsdword)
#pragma intrinsic(__addgsqword)
#endif

/*** FS segment addressing (x86 TEB/PCR access) ***/
/* Note: a duplicated "#pragma intrinsic(__writefsdword)" line was removed;
   FS accessors on x86 go up to dword only (no qword). */
#if defined(_M_IX86)
#pragma intrinsic(__writefsbyte)
#pragma intrinsic(__writefsword)
#pragma intrinsic(__writefsdword)
#pragma intrinsic(__readfsbyte)
#pragma intrinsic(__readfsword)
#pragma intrinsic(__readfsdword)
#pragma intrinsic(__incfsbyte)
#pragma intrinsic(__incfsword)
#pragma intrinsic(__incfsdword)
#pragma intrinsic(__addfsbyte)
#pragma intrinsic(__addfsword)
#pragma intrinsic(__addfsdword)
#endif

/*** Bit manipulation ***/
#pragma intrinsic(_BitScanForward)
#pragma intrinsic(_BitScanReverse)
#ifdef _WIN64
#pragma intrinsic(_BitScanForward64)
#pragma intrinsic(_BitScanReverse64)
#endif
#pragma intrinsic(_bittest)
#pragma intrinsic(_bittestandcomplement)
#pragma intrinsic(_bittestandreset)
#pragma intrinsic(_bittestandset)
#pragma intrinsic(_rotl8)
#pragma intrinsic(_rotl16)
#pragma intrinsic(_rotl)
#pragma intrinsic(_rotl64)
#pragma intrinsic(_lrotl)
#pragma intrinsic(_rotr8)
#pragma intrinsic(_rotr16)
#pragma intrinsic(_rotr)
#pragma intrinsic(_rotr64)
#pragma intrinsic(_lrotr)
#pragma intrinsic(_byteswap_ushort)
#pragma intrinsic(_byteswap_ulong)
#pragma intrinsic(_byteswap_uint64)
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__ll_lshift)
/* NOTE(review): explicit prototype kept — presumably some MSVC versions
   fail to declare __ll_rshift before the pragma; confirm before removing. */
__int64 __ll_rshift(__int64 Mask, int Bit);
#pragma intrinsic(__ll_rshift)
#pragma intrinsic(__ull_rshift)
#pragma intrinsic(__lzcnt)
#pragma intrinsic(__lzcnt16)
#pragma intrinsic(__popcnt)
#pragma intrinsic(__popcnt16)
#endif
#ifdef _M_AMD64
#pragma intrinsic(__shiftleft128)
#pragma intrinsic(__shiftright128)
#pragma intrinsic(_bittest64)
#pragma intrinsic(_bittestandcomplement64)
#pragma intrinsic(_bittestandreset64)
#pragma intrinsic(_bittestandset64)
#pragma intrinsic(__lzcnt64)
#pragma intrinsic(__popcnt64)
#elif defined(_M_ARM)
/* TODO: ARM bit-manipulation intrinsics not yet listed */
#endif

197 /*** 64/128-bit math ***/
198 __int64 __cdecl _abs64(__int64);
199 #pragma intrinsic(_abs64)
200 #if defined(_M_IX86) || defined(_M_AMD64)
201 #pragma intrinsic(__emul)
202 #pragma intrinsic(__emulu)
203 #endif
204 #ifdef _M_AMD64
205 __int64 __mulh(__int64 a, __int64 b);
206 #pragma intrinsic(__mulh)
207 #pragma intrinsic(__umulh)
208 __int64 _mul128(__int64 Multiplier, __int64 Multiplicand, __int64 * HighProduct);
209 #pragma intrinsic(_mul128)
210 #pragma intrinsic(_umul128)
211 #elif defined(_M_ARM)
212 #pragma intrinsic(_MulHigh)
213 #pragma intrinsic(_MulUnsignedHigh)
214 #endif
215
/** Floating point stuff (ARM only) **/
#if defined(_M_ARM)
#pragma intrinsic(_isunordered)
#pragma intrinsic(_isunorderedf)
#pragma intrinsic(_CopyDoubleFromInt64)
#pragma intrinsic(_CopyFloatFromInt32)
#pragma intrinsic(_CopyInt32FromFloat)
#pragma intrinsic(_CopyInt64FromDouble)
#endif

/*** Port I/O ***/
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__inbyte)
#pragma intrinsic(__inword)
#pragma intrinsic(__indword)
#pragma intrinsic(__inbytestring)
#pragma intrinsic(__inwordstring)
#pragma intrinsic(__indwordstring)
#pragma intrinsic(__outbyte)
#pragma intrinsic(__outword)
#pragma intrinsic(__outdword)
#pragma intrinsic(__outbytestring)
#pragma intrinsic(__outwordstring)
#pragma intrinsic(__outdwordstring)
#pragma intrinsic(_inp)
#pragma intrinsic(_inpd)
#pragma intrinsic(_inpw)
#pragma intrinsic(inp)
#pragma intrinsic(inpd)
#pragma intrinsic(inpw)
#pragma intrinsic(_outp)
#pragma intrinsic(_outpd)
#pragma intrinsic(_outpw)
#pragma intrinsic(outp)
#pragma intrinsic(outpd)
#pragma intrinsic(outpw)
#endif

/*** System information ***/
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__cpuid)
#pragma intrinsic(__cpuidex)
#pragma intrinsic(__rdtsc)
#pragma intrinsic(__rdtscp)
#pragma intrinsic(__writeeflags)
#pragma intrinsic(__readeflags)
#endif

/*** Interrupts and traps ***/
#pragma intrinsic(__debugbreak)
#pragma intrinsic(_disable)
#pragma intrinsic(_enable)
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__int2c)
#pragma intrinsic(__halt)
#pragma intrinsic(__ud2)
#if (_MSC_VER >= 1700)
#pragma intrinsic(__fastfail)
#else
/* Fallback for pre-VS2012 compilers that lack the __fastfail intrinsic:
 * raise the fast-fail interrupt (int 0x29) with the failure code in ecx.
 * NOTE(review): MSVC rejects __asm blocks when targeting AMD64, so this
 * fallback can only compile for _M_IX86 — confirm old x64 builds never
 * reach this branch. */
__declspec(noreturn) __forceinline
void __fastfail(unsigned int Code)
{
    __asm
    {
        mov ecx, Code
        int 29h
    }
}
#endif
#endif
/* Placeholder section, intentionally empty */
#if defined(_M_ARM)
#endif

/*** Protected memory management ***/
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__writecr0)
#pragma intrinsic(__writecr3)
#pragma intrinsic(__writecr4)
/* NOTE(review): __writecr8 is documented as x64-only; verify it is valid
 * here when compiling for _M_IX86. */
#pragma intrinsic(__writecr8)
#endif
#if defined(_M_IX86)
#pragma intrinsic(__readcr0)
#pragma intrinsic(__readcr2)
#pragma intrinsic(__readcr3)
//#pragma intrinsic(__readcr4)
// HACK: MSVC is broken
unsigned long __cdecl ___readcr4(void);
#define __readcr4 ___readcr4
/* NOTE(review): __readcr8 under _M_IX86 — CR8 access is normally an x64
 * feature; confirm the x86 compiler accepts this pragma. */
#pragma intrinsic(__readcr8)
#pragma intrinsic(__readdr)
#pragma intrinsic(__writedr)
// This intrinsic is broken and generates wrong opcodes,
// when optimization is enabled!
#pragma warning(push)
#pragma warning(disable:4711)
/* Replacement for the broken __invlpg: flush the TLB entry for Address,
 * with compiler barriers on both sides to prevent reordering around it. */
void __forceinline __invlpg_fixed(void * Address)
{
    _ReadWriteBarrier();
    __asm
    {
        mov eax, Address
        invlpg [eax]
    }
    _ReadWriteBarrier();
}
#pragma warning(pop)
#define __invlpg __invlpg_fixed
#elif defined(_M_AMD64)
#pragma intrinsic(__invlpg)
#pragma intrinsic(__readcr0)
#pragma intrinsic(__readcr2)
#pragma intrinsic(__readcr3)
#pragma intrinsic(__readcr4)
#pragma intrinsic(__readcr8)
#pragma intrinsic(__readdr)
#pragma intrinsic(__writedr)
#elif defined(_M_ARM)
#pragma intrinsic(__prefetch)
#endif

/*** System operations ***/
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__readmsr)
#pragma intrinsic(__writemsr)
#pragma intrinsic(__readpmc)
#pragma intrinsic(__segmentlimit)
#pragma intrinsic(__wbinvd)
#pragma intrinsic(__lidt)
#pragma intrinsic(__sidt)
#pragma intrinsic(_mm_pause)
#endif
#if defined(_M_ARM)
#pragma intrinsic(_MoveFromCoprocessor)
#pragma intrinsic(_MoveFromCoprocessor2)
#pragma intrinsic(_MoveFromCoprocessor64)
#pragma intrinsic(_MoveToCoprocessor)
#pragma intrinsic(_MoveToCoprocessor2)
#pragma intrinsic(_MoveToCoprocessor64)
#pragma intrinsic(_ReadStatusReg)
#pragma intrinsic(_WriteStatusReg)
#pragma intrinsic(__yield)
#pragma intrinsic(__wfe)
#pragma intrinsic(__wfi)
#pragma intrinsic(__swi)
#pragma intrinsic(__hvc)
#pragma intrinsic(__ldrexd)
#pragma intrinsic(__rdpmccntr64)
#pragma intrinsic(__sev)
#endif

/** Secure virtual machine (AMD SVM) **/
#if defined(_M_IX86) || defined(_M_AMD64)
#pragma intrinsic(__svm_clgi)
#pragma intrinsic(__svm_invlpga)
#pragma intrinsic(__svm_skinit)
#pragma intrinsic(__svm_stgi)
#pragma intrinsic(__svm_vmload)
#pragma intrinsic(__svm_vmrun)
#pragma intrinsic(__svm_vmsave)
#endif

/** Virtual machine extension (Intel VMX) — placeholders, intentionally empty **/
#if defined(_M_IX86) || defined(_M_AMD64)
#endif
#if defined(_M_AMD64)
#endif

/** Misc **/
#pragma intrinsic(__nop)
#if (_MSC_VER >= 1700)
#pragma intrinsic(__code_seg)
#endif
#ifdef _M_ARM
#pragma intrinsic(_AddSatInt)
#pragma intrinsic(_DAddSatInt)
#pragma intrinsic(_DSubSatInt)
#pragma intrinsic(_SubSatInt)
#pragma intrinsic(__emit)
#pragma intrinsic(__static_assert)
#endif

#ifdef __cplusplus
}
#endif

/* EOF */