/*
 * PSDK/WIN32K
 * ReactOS: reactos/include/crt/msc/intrin.h
 * Compiler intrinsic declarations for Microsoft Visual C++.
 */
1 #ifndef KJK_INTRIN_H_
2 #define KJK_INTRIN_H_
3
4 #ifdef __cplusplus
5 extern "C" {
6 #endif
7
/*** Stack frame juggling ***/
/* Each prototype is paired with #pragma intrinsic so MSVC expands the
 * call inline instead of emitting an external function reference. */
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)
void * _AddressOfReturnAddress(void);
#pragma intrinsic(_AddressOfReturnAddress)
#if defined(_M_IX86) || defined(_M_AMD64)
unsigned int __getcallerseflags(void);
#pragma intrinsic(__getcallerseflags)
#endif
17
/*** Memory barriers ***/
/* _Read/_Write/_ReadWriteBarrier are compiler-only ordering barriers;
 * the _mm_*fence variants below additionally emit a CPU fence. */
void _ReadWriteBarrier(void);
#pragma intrinsic(_ReadWriteBarrier)
void _ReadBarrier(void);
#pragma intrinsic(_ReadBarrier)
void _WriteBarrier(void);
#pragma intrinsic(_WriteBarrier)
#if defined(_M_IX86) || defined(_M_AMD64)
void _mm_mfence(void);
#pragma intrinsic(_mm_mfence)
void _mm_lfence(void);
#pragma intrinsic(_mm_lfence)
void _mm_sfence(void);
#pragma intrinsic(_mm_sfence)
#endif
#ifdef _M_AMD64
void __faststorefence(void);
#pragma intrinsic(__faststorefence)
#endif
37
/*** Atomic operations ***/
/* Interlocked intrinsics; the 32-bit forms are available on every
 * architecture, the 64-bit and pointer forms are gated on AMD64 below. */
long _InterlockedCompareExchange(volatile long * Destination, long Exchange, long Comperand);
#pragma intrinsic(_InterlockedCompareExchange)
long _InterlockedExchange(volatile long * Target, long Value);
#pragma intrinsic(_InterlockedExchange)
long _InterlockedExchangeAdd(volatile long * Addend, long Value);
#pragma intrinsic(_InterlockedExchangeAdd)
char _InterlockedAnd8(volatile char * value, char mask);
#pragma intrinsic(_InterlockedAnd8)
short _InterlockedAnd16(volatile short * value, short mask);
#pragma intrinsic(_InterlockedAnd16)
long _InterlockedAnd(volatile long * value, long mask);
#pragma intrinsic(_InterlockedAnd)
char _InterlockedOr8(volatile char * value, char mask);
#pragma intrinsic(_InterlockedOr8)
short _InterlockedOr16(volatile short * value, short mask);
#pragma intrinsic(_InterlockedOr16)
long _InterlockedOr(volatile long * value, long mask);
#pragma intrinsic(_InterlockedOr)
char _InterlockedXor8(volatile char * value, char mask);
#pragma intrinsic(_InterlockedXor8)
short _InterlockedXor16(volatile short * value, short mask);
#pragma intrinsic(_InterlockedXor16)
long _InterlockedXor(volatile long * value, long mask);
#pragma intrinsic(_InterlockedXor)
long _InterlockedDecrement(volatile long * lpAddend);
#pragma intrinsic(_InterlockedDecrement)
long _InterlockedIncrement(volatile long * lpAddend);
#pragma intrinsic(_InterlockedIncrement)
short _InterlockedDecrement16(volatile short * lpAddend);
#pragma intrinsic(_InterlockedDecrement16)
short _InterlockedIncrement16(volatile short * lpAddend);
#pragma intrinsic(_InterlockedIncrement16)
unsigned char _interlockedbittestandreset(volatile long * a, long b);
#pragma intrinsic(_interlockedbittestandreset)
unsigned char _interlockedbittestandset(volatile long * a, long b);
#pragma intrinsic(_interlockedbittestandset)

#if defined(_M_IX86)
long _InterlockedAddLargeStatistic(volatile __int64 * Addend, long Value);
#pragma intrinsic(_InterlockedAddLargeStatistic)
#elif defined(_M_AMD64)
/* 64-bit and pointer-sized interlocked operations (AMD64 only). */
__int64 _InterlockedExchange64(volatile __int64 * Target, __int64 Value);
#pragma intrinsic(_InterlockedExchange64)
__int64 _InterlockedExchangeAdd64(volatile __int64 * Addend, __int64 Value);
#pragma intrinsic(_InterlockedExchangeAdd64)
void * _InterlockedCompareExchangePointer(void * volatile * Destination, void * Exchange, void * Comperand);
#pragma intrinsic(_InterlockedCompareExchangePointer)
void * _InterlockedExchangePointer(void * volatile * Target, void * Value);
#pragma intrinsic(_InterlockedExchangePointer)
__int64 _InterlockedAnd64(volatile __int64 * value, __int64 mask);
#pragma intrinsic(_InterlockedAnd64)
__int64 _InterlockedOr64(volatile __int64 * value, __int64 mask);
#pragma intrinsic(_InterlockedOr64)
__int64 _InterlockedCompareExchange64(volatile __int64 * Destination, __int64 Exchange, __int64 Comperand);
#pragma intrinsic(_InterlockedCompareExchange64)
__int64 _InterlockedDecrement64(volatile __int64 * lpAddend);
#pragma intrinsic(_InterlockedDecrement64)
__int64 _InterlockedIncrement64(volatile __int64 * lpAddend);
#pragma intrinsic(_InterlockedIncrement64)
unsigned char _interlockedbittestandreset64(volatile __int64 * a, __int64 b);
#pragma intrinsic(_interlockedbittestandreset64)
unsigned char _interlockedbittestandset64(volatile __int64 * a, __int64 b);
#pragma intrinsic(_interlockedbittestandset64)
#endif
103
#if defined(_M_IX86) || defined(_M_AMD64)
/*** String operations ***/
/* rep stos / rep movs style block fill and copy intrinsics. */
void __stosb(unsigned char * Dest, unsigned char Data, size_t Count);
#pragma intrinsic(__stosb)
void __stosw(unsigned short * Dest, unsigned short Data, size_t Count);
#pragma intrinsic(__stosw)
void __stosd(unsigned long * Dest, unsigned long Data, size_t Count);
#pragma intrinsic(__stosd)
void __movsb(unsigned char * Destination, unsigned char const * Source, size_t Count);
#pragma intrinsic(__movsb)
void __movsw(unsigned short * Destination, unsigned short const * Source, size_t Count);
#pragma intrinsic(__movsw)
void __movsd(unsigned long * Destination, unsigned long const * Source, size_t Count);
#pragma intrinsic(__movsd)
#endif
#ifdef _M_AMD64
void __movsq(unsigned __int64 * Destination, unsigned __int64 const * Source, size_t Count);
#pragma intrinsic(__movsq)
#endif
123
#if defined(_M_AMD64)
/*** GS segment addressing ***/
/* Read/write/increment/add at an offset from the GS segment base
 * (AMD64 only; x86 uses the FS equivalents below). */
void __writegsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__writegsbyte)
void __writegsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__writegsword)
void __writegsdword(unsigned long Offset, unsigned long Data);
#pragma intrinsic(__writegsdword)
void __writegsqword(unsigned long Offset, unsigned __int64 Data);
#pragma intrinsic(__writegsqword)
unsigned char __readgsbyte(unsigned long Offset);
#pragma intrinsic(__readgsbyte)
unsigned short __readgsword(unsigned long Offset);
#pragma intrinsic(__readgsword)
unsigned long __readgsdword(unsigned long Offset);
#pragma intrinsic(__readgsdword)
unsigned __int64 __readgsqword(unsigned long Offset);
#pragma intrinsic(__readgsqword)
void __incgsbyte(unsigned long Offset);
#pragma intrinsic(__incgsbyte)
void __incgsword(unsigned long Offset);
#pragma intrinsic(__incgsword)
void __incgsdword(unsigned long Offset);
#pragma intrinsic(__incgsdword)
void __addgsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__addgsbyte)
void __addgsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__addgsword)
void __addgsdword(unsigned long Offset, unsigned int Data);
#pragma intrinsic(__addgsdword)
void __addgsqword(unsigned long Offset, unsigned __int64 Data);
#pragma intrinsic(__addgsqword)
#endif
157
#if defined(_M_IX86)
/*** FS segment addressing ***/
/* x86 counterpart of the GS intrinsics above; note there are no
 * qword variants on 32-bit. */
void __writefsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__writefsbyte)
void __writefsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__writefsword)
void __writefsdword(unsigned long Offset, unsigned long Data);
#pragma intrinsic(__writefsdword)
unsigned char __readfsbyte(unsigned long Offset);
#pragma intrinsic(__readfsbyte)
unsigned short __readfsword(unsigned long Offset);
#pragma intrinsic(__readfsword)
unsigned long __readfsdword(unsigned long Offset);
#pragma intrinsic(__readfsdword)
void __incfsbyte(unsigned long Offset);
#pragma intrinsic(__incfsbyte)
void __incfsword(unsigned long Offset);
#pragma intrinsic(__incfsword)
void __incfsdword(unsigned long Offset);
#pragma intrinsic(__incfsdword)
void __addfsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__addfsbyte)
void __addfsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__addfsword)
void __addfsdword(unsigned long Offset, unsigned int Data);
#pragma intrinsic(__addfsdword)
#endif
185
186
/*** Bit manipulation ***/
/* Bit scan/test/rotate/byteswap intrinsics; _BitScan* return 0 when
 * Mask is 0 (Index is then undefined). */
unsigned char _BitScanForward(unsigned long * Index, unsigned long Mask);
#pragma intrinsic(_BitScanForward)
unsigned char _BitScanReverse(unsigned long * Index, unsigned long Mask);
#pragma intrinsic(_BitScanReverse)
unsigned char _bittest(const long * a, long b);
#pragma intrinsic(_bittest)
unsigned char _bittestandcomplement(long * a, long b);
#pragma intrinsic(_bittestandcomplement)
unsigned char _bittestandreset(long * a, long b);
#pragma intrinsic(_bittestandreset)
unsigned char _bittestandset(long * a, long b);
#pragma intrinsic(_bittestandset)
unsigned char _rotl8(unsigned char value, unsigned char shift);
#pragma intrinsic(_rotl8)
unsigned short _rotl16(unsigned short value, unsigned char shift);
#pragma intrinsic(_rotl16)
unsigned int _rotl(unsigned int value, int shift);
#pragma intrinsic(_rotl)
unsigned int _rotr(unsigned int value, int shift);
#pragma intrinsic(_rotr)
unsigned char _rotr8(unsigned char value, unsigned char shift);
#pragma intrinsic(_rotr8)
unsigned short _rotr16(unsigned short value, unsigned char shift);
#pragma intrinsic(_rotr16)
unsigned short _byteswap_ushort(unsigned short value);
#pragma intrinsic(_byteswap_ushort)
unsigned long _byteswap_ulong(unsigned long value);
#pragma intrinsic(_byteswap_ulong)
unsigned __int64 _byteswap_uint64(unsigned __int64 value);
#pragma intrinsic(_byteswap_uint64)
#if defined(_M_IX86) || defined(_M_AMD64)
unsigned __int64 __ll_lshift(unsigned __int64 Mask, int Bit);
#pragma intrinsic(__ll_lshift)
__int64 __ll_rshift(__int64 Mask, int Bit);
#pragma intrinsic(__ll_rshift)
unsigned __int64 __ull_rshift(unsigned __int64 Mask, int Bit);
#pragma intrinsic(__ull_rshift)
#endif
#ifdef _M_AMD64
unsigned char _bittest64(__int64 const *a, __int64 b);
#pragma intrinsic(_bittest64)
#endif
230
#if defined(_M_IX86) || defined(_M_AMD64)
/*** 64-bit math ***/
/* Widening 32x32 -> 64 multiplies; __umulh yields the high 64 bits of
 * a 64x64 product (AMD64 only). */
__int64 __emul(int a, int b);
#pragma intrinsic(__emul)
unsigned __int64 __emulu(unsigned int a, unsigned int b);
#pragma intrinsic(__emulu)
#endif
#ifdef _M_AMD64
unsigned __int64 __umulh(unsigned __int64 a, unsigned __int64 b);
#pragma intrinsic(__umulh)
#endif
242
#if defined(_M_IX86) || defined(_M_AMD64)
/*** Port I/O ***/
/* in/out instruction wrappers, single-value and string (rep) forms. */
unsigned char __inbyte(unsigned short Port);
#pragma intrinsic(__inbyte)
unsigned short __inword(unsigned short Port);
#pragma intrinsic(__inword)
unsigned long __indword(unsigned short Port);
#pragma intrinsic(__indword)
void __inbytestring(unsigned short Port, unsigned char * Buffer, unsigned long Count);
#pragma intrinsic(__inbytestring)
void __inwordstring(unsigned short Port, unsigned short * Buffer, unsigned long Count);
#pragma intrinsic(__inwordstring)
void __indwordstring(unsigned short Port, unsigned long * Buffer, unsigned long Count);
#pragma intrinsic(__indwordstring)
void __outbyte(unsigned short Port, unsigned char Data);
#pragma intrinsic(__outbyte)
void __outword(unsigned short Port, unsigned short Data);
#pragma intrinsic(__outword)
void __outdword(unsigned short Port, unsigned long Data);
#pragma intrinsic(__outdword)
void __outbytestring(unsigned short Port, unsigned char * Buffer, unsigned long Count);
#pragma intrinsic(__outbytestring)
void __outwordstring(unsigned short Port, unsigned short * Buffer, unsigned long Count);
#pragma intrinsic(__outwordstring)
void __outdwordstring(unsigned short Port, unsigned long * Buffer, unsigned long Count);
#pragma intrinsic(__outdwordstring)
#endif
270
#if defined(_M_IX86) || defined(_M_AMD64)
/*** System information ***/
void __cpuid(int CPUInfo[], int InfoType);
#pragma intrinsic(__cpuid)
unsigned __int64 __rdtsc(void);
#pragma intrinsic(__rdtsc)
/* EFLAGS/RFLAGS access; uintptr_t so the width tracks the target. */
void __writeeflags(uintptr_t Value);
#pragma intrinsic(__writeeflags)
uintptr_t __readeflags(void);
#pragma intrinsic(__readeflags)
#endif
282
/*** Interrupts ***/
void __debugbreak(void);
#pragma intrinsic(__debugbreak)
void _disable(void);
#pragma intrinsic(_disable)
void _enable(void);
#pragma intrinsic(_enable)
#if defined(_M_IX86) || defined(_M_AMD64)
void __int2c(void);
#pragma intrinsic(__int2c)
void __halt(void);
#pragma intrinsic(__halt)
#endif
296
/*** Protected memory management ***/
#if defined(_M_IX86) || defined(_M_AMD64)
/* NOTE(review): the __writecr* prototypes take unsigned __int64 on both
 * architectures here, while the x86 __readcr* branch below returns
 * unsigned long — confirm this asymmetry is intentional. */
void __writecr0(unsigned __int64 Data);
#pragma intrinsic(__writecr0)
void __writecr3(unsigned __int64 Data);
#pragma intrinsic(__writecr3)
void __writecr4(unsigned __int64 Data);
#pragma intrinsic(__writecr4)
#endif
#ifdef _M_AMD64
void __writecr8(unsigned __int64 Data);
#pragma intrinsic(__writecr8)
unsigned __int64 __readcr0(void);
#pragma intrinsic(__readcr0)
unsigned __int64 __readcr2(void);
#pragma intrinsic(__readcr2)
unsigned __int64 __readcr3(void);
#pragma intrinsic(__readcr3)
unsigned __int64 __readcr4(void);
#pragma intrinsic(__readcr4)
unsigned __int64 __readcr8(void);
#pragma intrinsic(__readcr8)
unsigned __int64 __readdr(unsigned int reg);
#pragma intrinsic(__readdr)
void __writedr(unsigned reg, unsigned __int64 value);
#pragma intrinsic(__writedr)
#elif defined(_M_IX86)
/* NOTE(review): unlike the AMD64 branch, these declarations have no
 * matching #pragma intrinsic lines — confirm that is deliberate. */
unsigned long __readcr0(void);
unsigned long __readcr2(void);
unsigned long __readcr3(void);
//unsigned long __readcr4(void);
//#pragma intrinsic(__readcr4)
// HACK: MSVC is broken
/* Workaround: route __readcr4 through an out-of-line helper instead of
 * the (broken, per the comment above) compiler intrinsic. */
unsigned long ___readcr4(void);
#define __readcr4 ___readcr4

unsigned int __readdr(unsigned int reg);
void __writedr(unsigned reg, unsigned int value);
#endif
336
#ifdef _M_IX86
// This intrinsic is broken and generates wrong opcodes,
// when optimization is enabled!
#pragma warning(push)
#pragma warning(disable:4711)
/* Inline-asm replacement for the broken x86 __invlpg intrinsic:
 * flushes the TLB entry for the page containing Address. The
 * _ReadWriteBarrier calls keep the compiler from reordering memory
 * accesses across the invalidation. */
void __forceinline __invlpg_fixed(void * Address)
{
    _ReadWriteBarrier();
   __asm
   {
       mov eax, Address
       invlpg [eax]
   }
   _ReadWriteBarrier();
}
#pragma warning(pop)
#define __invlpg __invlpg_fixed
#elif defined(_M_AMD64)
void __invlpg(void * Address);
#pragma intrinsic(__invlpg)
#endif
358
/*** System operations ***/
#if defined(_M_IX86) || defined(_M_AMD64)
unsigned __int64 __readmsr(int reg);
#pragma intrinsic(__readmsr)
void __writemsr(unsigned long Register, unsigned __int64 Value);
#pragma intrinsic(__writemsr)
unsigned __int64 __readpmc(int counter);
#pragma intrinsic(__readpmc)
unsigned long __segmentlimit(unsigned long a);
#pragma intrinsic(__segmentlimit)
void __wbinvd(void);
#pragma intrinsic(__wbinvd)
void __lidt(void *Source);
#pragma intrinsic(__lidt)
void __sidt(void *Destination);
#pragma intrinsic(__sidt)
void _mm_pause(void);
#pragma intrinsic(_mm_pause)
#endif
378
379 #ifdef __cplusplus
380 }
381 #endif
382
383 #endif /* KJK_INTRIN_H_ */
384
385 /* EOF */