#ifndef KJK_INTRIN_H_
#define KJK_INTRIN_H_

#ifdef __cplusplus
extern "C" {
#endif

/*** Stack frame juggling ***/
void * _ReturnAddress(void);
#pragma intrinsic(_ReturnAddress)
void * _AddressOfReturnAddress(void);
#pragma intrinsic(_AddressOfReturnAddress)
unsigned int __getcallerseflags(void);
#pragma intrinsic(__getcallerseflags)
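
/*
 * Usage sketch (illustrative only, not part of this header): _ReturnAddress
 * evaluates to the address the enclosing function will return to, which is
 * useful for caller tracking:
 *
 *   void *caller = _ReturnAddress();   // inside some function body
 *   // caller now points into the code of whoever called this function
 */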

/*** Memory barriers ***/
void _ReadWriteBarrier(void);
#pragma intrinsic(_ReadWriteBarrier)
void _ReadBarrier(void);
#pragma intrinsic(_ReadBarrier)
void _WriteBarrier(void);
#pragma intrinsic(_WriteBarrier)
void _mm_mfence(void);
#pragma intrinsic(_mm_mfence)
void _mm_lfence(void);
#pragma intrinsic(_mm_lfence)
void _mm_sfence(void);
#pragma intrinsic(_mm_sfence)
#ifdef _M_AMD64
void __faststorefence(void);
#pragma intrinsic(__faststorefence)
#endif
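
/*
 * Usage sketch (illustrative only, not part of this header): the
 * _ReadWriteBarrier family only constrains compiler reordering, while the
 * _mm_*fence intrinsics emit real CPU fence instructions. A producer that
 * publishes data through a flag might combine them like this:
 *
 *   data = 42;          // plain store (data/ready are hypothetical volatiles)
 *   _WriteBarrier();    // keep the compiler from reordering the stores
 *   _mm_sfence();       // order the stores on the CPU as well
 *   ready = 1;          // publish
 *
 * Whether the hardware fence is actually required depends on the memory
 * model of the surrounding code; this only shows which intrinsic addresses
 * which kind of reordering.
 */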

/*** Atomic operations ***/
long _InterlockedCompareExchange(volatile long * Destination, long Exchange, long Comparand);
#pragma intrinsic(_InterlockedCompareExchange)
long _InterlockedExchange(volatile long * Target, long Value);
#pragma intrinsic(_InterlockedExchange)
long _InterlockedExchangeAdd(volatile long * Addend, long Value);
#pragma intrinsic(_InterlockedExchangeAdd)
char _InterlockedAnd8(volatile char * value, char mask);
#pragma intrinsic(_InterlockedAnd8)
short _InterlockedAnd16(volatile short * value, short mask);
#pragma intrinsic(_InterlockedAnd16)
long _InterlockedAnd(volatile long * value, long mask);
#pragma intrinsic(_InterlockedAnd)
char _InterlockedOr8(volatile char * value, char mask);
#pragma intrinsic(_InterlockedOr8)
short _InterlockedOr16(volatile short * value, short mask);
#pragma intrinsic(_InterlockedOr16)
long _InterlockedOr(volatile long * value, long mask);
#pragma intrinsic(_InterlockedOr)
char _InterlockedXor8(volatile char * value, char mask);
#pragma intrinsic(_InterlockedXor8)
short _InterlockedXor16(volatile short * value, short mask);
#pragma intrinsic(_InterlockedXor16)
long _InterlockedXor(volatile long * value, long mask);
#pragma intrinsic(_InterlockedXor)
long _InterlockedDecrement(volatile long * lpAddend);
#pragma intrinsic(_InterlockedDecrement)
long _InterlockedIncrement(volatile long * lpAddend);
#pragma intrinsic(_InterlockedIncrement)
short _InterlockedDecrement16(volatile short * lpAddend);
#pragma intrinsic(_InterlockedDecrement16)
short _InterlockedIncrement16(volatile short * lpAddend);
#pragma intrinsic(_InterlockedIncrement16)
unsigned char _interlockedbittestandreset(volatile long * a, long b);
#pragma intrinsic(_interlockedbittestandreset)
unsigned char _interlockedbittestandset(volatile long * a, long b);
#pragma intrinsic(_interlockedbittestandset)

#if defined(_M_IX86)
long _InterlockedAddLargeStatistic(volatile __int64 * Addend, long Value);
#pragma intrinsic(_InterlockedAddLargeStatistic)
#elif defined(_M_AMD64)
__int64 _InterlockedExchange64(volatile __int64 * Target, __int64 Value);
#pragma intrinsic(_InterlockedExchange64)
__int64 _InterlockedExchangeAdd64(volatile __int64 * Addend, __int64 Value);
#pragma intrinsic(_InterlockedExchangeAdd64)
void * _InterlockedCompareExchangePointer(void * volatile * Destination, void * Exchange, void * Comparand);
#pragma intrinsic(_InterlockedCompareExchangePointer)
void * _InterlockedExchangePointer(void * volatile * Target, void * Value);
#pragma intrinsic(_InterlockedExchangePointer)
__int64 _InterlockedAnd64(volatile __int64 * value, __int64 mask);
#pragma intrinsic(_InterlockedAnd64)
__int64 _InterlockedOr64(volatile __int64 * value, __int64 mask);
#pragma intrinsic(_InterlockedOr64)
__int64 _InterlockedCompareExchange64(volatile __int64 * Destination, __int64 Exchange, __int64 Comparand);
#pragma intrinsic(_InterlockedCompareExchange64)
__int64 _InterlockedDecrement64(volatile __int64 * lpAddend);
#pragma intrinsic(_InterlockedDecrement64)
__int64 _InterlockedIncrement64(volatile __int64 * lpAddend);
#pragma intrinsic(_InterlockedIncrement64)
unsigned char _interlockedbittestandreset64(volatile __int64 * a, __int64 b);
#pragma intrinsic(_interlockedbittestandreset64)
unsigned char _interlockedbittestandset64(volatile __int64 * a, __int64 b);
#pragma intrinsic(_interlockedbittestandset64)
#endif
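
/*
 * Usage sketch (illustrative only, not part of this header): the classic
 * compare-and-swap loop. _InterlockedCompareExchange stores Exchange into
 * *Destination only if the current value equals Comparand, and always
 * returns the previous value, so an arbitrary read-modify-write can be
 * retried until no other thread intervenes:
 *
 *   long old, desired;
 *   do
 *   {
 *       old = counter;             // counter is a hypothetical volatile long
 *       desired = old * 2 + 1;     // any pure function of the old value
 *   } while (_InterlockedCompareExchange(&counter, desired, old) != old);
 */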

/*** String operations ***/
void __stosb(unsigned char * Dest, unsigned char Data, size_t Count);
#pragma intrinsic(__stosb)
void __stosw(unsigned short * Dest, unsigned short Data, size_t Count);
#pragma intrinsic(__stosw)
void __stosd(unsigned long * Dest, unsigned long Data, size_t Count);
#pragma intrinsic(__stosd)
void __movsb(unsigned char * Destination, unsigned char const * Source, size_t Count);
#pragma intrinsic(__movsb)
void __movsw(unsigned short * Destination, unsigned short const * Source, size_t Count);
#pragma intrinsic(__movsw)
void __movsd(unsigned long * Destination, unsigned long const * Source, size_t Count);
#pragma intrinsic(__movsd)
#ifdef _M_AMD64
void __movsq(unsigned __int64 * Destination, unsigned __int64 const * Source, size_t Count);
#pragma intrinsic(__movsq)
#endif
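
/*
 * Usage sketch (illustrative only, not part of this header): these map to
 * the rep stos / rep movs instruction forms, with Count given in elements,
 * not bytes. Filling and copying a 256-byte buffer could look like:
 *
 *   unsigned char src[256], dst[256];
 *   __stosb(src, 0xAA, sizeof(src));            // like memset(src, 0xAA, 256)
 *   __movsb(dst, src, sizeof(src));             // like memcpy(dst, src, 256)
 *   __stosd((unsigned long *)dst, 0, 256 / 4);  // Count is in dwords here
 */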

#if defined(_M_AMD64)
/*** GS segment addressing ***/
void __writegsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__writegsbyte)
void __writegsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__writegsword)
void __writegsdword(unsigned long Offset, unsigned long Data);
#pragma intrinsic(__writegsdword)
void __writegsqword(unsigned long Offset, unsigned __int64 Data);
#pragma intrinsic(__writegsqword)
unsigned char __readgsbyte(unsigned long Offset);
#pragma intrinsic(__readgsbyte)
unsigned short __readgsword(unsigned long Offset);
#pragma intrinsic(__readgsword)
unsigned long __readgsdword(unsigned long Offset);
#pragma intrinsic(__readgsdword)
unsigned __int64 __readgsqword(unsigned long Offset);
#pragma intrinsic(__readgsqword)
void __incgsbyte(unsigned long Offset);
#pragma intrinsic(__incgsbyte)
void __incgsword(unsigned long Offset);
#pragma intrinsic(__incgsword)
void __incgsdword(unsigned long Offset);
#pragma intrinsic(__incgsdword)
void __addgsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__addgsbyte)
void __addgsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__addgsword)
void __addgsdword(unsigned long Offset, unsigned int Data);
#pragma intrinsic(__addgsdword)
void __addgsqword(unsigned long Offset, unsigned __int64 Data);
#pragma intrinsic(__addgsqword)
#endif
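
/*
 * Usage sketch (illustrative only, not part of this header): on x64 Windows
 * the gs segment points at the current TEB in user mode (and at the KPCR in
 * kernel mode), so the TEB self pointer at offset 0x30 is typically read as:
 *
 *   void *Teb = (void *)__readgsqword(0x30);   // NtCurrentTeb() equivalent
 */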

#if defined(_M_IX86)
/*** FS segment addressing ***/
void __writefsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__writefsbyte)
void __writefsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__writefsword)
void __writefsdword(unsigned long Offset, unsigned long Data);
#pragma intrinsic(__writefsdword)
unsigned char __readfsbyte(unsigned long Offset);
#pragma intrinsic(__readfsbyte)
unsigned short __readfsword(unsigned long Offset);
#pragma intrinsic(__readfsword)
unsigned long __readfsdword(unsigned long Offset);
#pragma intrinsic(__readfsdword)
void __incfsbyte(unsigned long Offset);
#pragma intrinsic(__incfsbyte)
void __incfsword(unsigned long Offset);
#pragma intrinsic(__incfsword)
void __incfsdword(unsigned long Offset);
#pragma intrinsic(__incfsdword)
void __addfsbyte(unsigned long Offset, unsigned char Data);
#pragma intrinsic(__addfsbyte)
void __addfsword(unsigned long Offset, unsigned short Data);
#pragma intrinsic(__addfsword)
void __addfsdword(unsigned long Offset, unsigned int Data);
#pragma intrinsic(__addfsdword)
#endif
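
/*
 * Usage sketch (illustrative only, not part of this header): on x86 Windows
 * the fs segment points at the current TEB, whose self pointer sits at
 * offset 0x18, so the usual NtCurrentTeb() idiom is:
 *
 *   void *Teb = (void *)__readfsdword(0x18);
 */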


/*** Bit manipulation ***/
unsigned char _BitScanForward(unsigned long * Index, unsigned long Mask);
#pragma intrinsic(_BitScanForward)
unsigned char _BitScanReverse(unsigned long * Index, unsigned long Mask);
#pragma intrinsic(_BitScanReverse)
unsigned char _bittest(const long * a, long b);
#pragma intrinsic(_bittest)
unsigned char _bittestandcomplement(long * a, long b);
#pragma intrinsic(_bittestandcomplement)
unsigned char _bittestandreset(long * a, long b);
#pragma intrinsic(_bittestandreset)
unsigned char _bittestandset(long * a, long b);
#pragma intrinsic(_bittestandset)
unsigned char _rotl8(unsigned char value, unsigned char shift);
#pragma intrinsic(_rotl8)
unsigned short _rotl16(unsigned short value, unsigned char shift);
#pragma intrinsic(_rotl16)
unsigned int _rotl(unsigned int value, int shift);
#pragma intrinsic(_rotl)
unsigned int _rotr(unsigned int value, int shift);
#pragma intrinsic(_rotr)
unsigned char _rotr8(unsigned char value, unsigned char shift);
#pragma intrinsic(_rotr8)
unsigned short _rotr16(unsigned short value, unsigned char shift);
#pragma intrinsic(_rotr16)
unsigned __int64 __ll_lshift(unsigned __int64 Mask, int Bit);
#pragma intrinsic(__ll_lshift)
__int64 __ll_rshift(__int64 Mask, int Bit);
#pragma intrinsic(__ll_rshift)
unsigned __int64 __ull_rshift(unsigned __int64 Mask, int Bit);
#pragma intrinsic(__ull_rshift)
unsigned short _byteswap_ushort(unsigned short value);
#pragma intrinsic(_byteswap_ushort)
unsigned long _byteswap_ulong(unsigned long value);
#pragma intrinsic(_byteswap_ulong)
unsigned __int64 _byteswap_uint64(unsigned __int64 value);
#pragma intrinsic(_byteswap_uint64)
#ifdef _M_AMD64
unsigned char _bittest64(__int64 const *a, __int64 b);
#pragma intrinsic(_bittest64)
#endif
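
/*
 * Usage sketch (illustrative only, not part of this header):
 * _BitScanForward returns nonzero when Mask has any bit set and stores the
 * zero-based index of the lowest set bit through Index; otherwise Index is
 * left undefined, so the return value must be checked:
 *
 *   unsigned long index;
 *   if (_BitScanForward(&index, 0x50))   // bits 4 and 6 set
 *   {
 *       // index == 4, the lowest set bit
 *   }
 */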

/*** 64-bit math ***/
__int64 __emul(int a, int b);
#pragma intrinsic(__emul)
unsigned __int64 __emulu(unsigned int a, unsigned int b);
#pragma intrinsic(__emulu)
#ifdef _M_AMD64
unsigned __int64 __umulh(unsigned __int64 a, unsigned __int64 b);
#pragma intrinsic(__umulh)
#endif
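
/*
 * Usage sketch (illustrative only, not part of this header): __emul/__emulu
 * produce the full 64-bit product of two 32-bit operands without the usual
 * C truncation, and __umulh (x64 only) returns the high half of a 64x64
 * multiplication:
 *
 *   __int64 full = __emul(0x40000000, 4);   // 0x100000000, no overflow
 *   unsigned __int64 hi = __umulh(a, b);    // high 64 bits of a*b, where a and b
 *                                           // are hypothetical unsigned __int64 values
 */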

/*** Port I/O ***/
unsigned char __inbyte(unsigned short Port);
#pragma intrinsic(__inbyte)
unsigned short __inword(unsigned short Port);
#pragma intrinsic(__inword)
unsigned long __indword(unsigned short Port);
#pragma intrinsic(__indword)
void __inbytestring(unsigned short Port, unsigned char * Buffer, unsigned long Count);
#pragma intrinsic(__inbytestring)
void __inwordstring(unsigned short Port, unsigned short * Buffer, unsigned long Count);
#pragma intrinsic(__inwordstring)
void __indwordstring(unsigned short Port, unsigned long * Buffer, unsigned long Count);
#pragma intrinsic(__indwordstring)
void __outbyte(unsigned short Port, unsigned char Data);
#pragma intrinsic(__outbyte)
void __outword(unsigned short Port, unsigned short Data);
#pragma intrinsic(__outword)
void __outdword(unsigned short Port, unsigned long Data);
#pragma intrinsic(__outdword)
void __outbytestring(unsigned short Port, unsigned char * Buffer, unsigned long Count);
#pragma intrinsic(__outbytestring)
void __outwordstring(unsigned short Port, unsigned short * Buffer, unsigned long Count);
#pragma intrinsic(__outwordstring)
void __outdwordstring(unsigned short Port, unsigned long * Buffer, unsigned long Count);
#pragma intrinsic(__outdwordstring)
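
/*
 * Usage sketch (illustrative only, not part of this header): these emit the
 * in/out instructions directly and are therefore only usable from code that
 * runs with I/O privilege (typically kernel mode). Reading a CMOS register
 * through the classic 0x70/0x71 port pair, for example, looks like:
 *
 *   __outbyte(0x70, 0x0A);                  // select CMOS status register A
 *   unsigned char value = __inbyte(0x71);   // read the selected register
 */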

/*** System information ***/
void __cpuid(int CPUInfo[], int InfoType);
#pragma intrinsic(__cpuid)
unsigned __int64 __rdtsc(void);
#pragma intrinsic(__rdtsc)
void __writeeflags(uintptr_t Value);
#pragma intrinsic(__writeeflags)
uintptr_t __readeflags(void);
#pragma intrinsic(__readeflags)
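
/*
 * Usage sketch (illustrative only, not part of this header): __cpuid fills
 * the four-element array with EAX, EBX, ECX and EDX for the requested leaf.
 * Leaf 0 returns the 12-byte vendor string split across EBX, EDX and ECX:
 *
 *   int regs[4];
 *   char vendor[13];
 *   __cpuid(regs, 0);
 *   memcpy(vendor + 0, &regs[1], 4);   // EBX
 *   memcpy(vendor + 4, &regs[3], 4);   // EDX
 *   memcpy(vendor + 8, &regs[2], 4);   // ECX
 *   vendor[12] = '\0';                 // e.g. "GenuineIntel"
 */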

/*** Interrupts ***/
void __debugbreak(void);
#pragma intrinsic(__debugbreak)
void __int2c(void);
#pragma intrinsic(__int2c)
void _disable(void);
#pragma intrinsic(_disable)
void _enable(void);
#pragma intrinsic(_enable)
void __halt(void);
#pragma intrinsic(__halt)

/*** Protected memory management ***/
void __writecr0(unsigned __int64 Data);
#pragma intrinsic(__writecr0)
void __writecr3(unsigned __int64 Data);
#pragma intrinsic(__writecr3)
void __writecr4(unsigned __int64 Data);
#pragma intrinsic(__writecr4)

#ifdef _M_AMD64
void __writecr8(unsigned __int64 Data);
#pragma intrinsic(__writecr8)
unsigned __int64 __readcr0(void);
#pragma intrinsic(__readcr0)
unsigned __int64 __readcr2(void);
#pragma intrinsic(__readcr2)
unsigned __int64 __readcr3(void);
#pragma intrinsic(__readcr3)
unsigned __int64 __readcr4(void);
#pragma intrinsic(__readcr4)
unsigned __int64 __readcr8(void);
#pragma intrinsic(__readcr8)
unsigned __int64 __readdr(unsigned int reg);
#pragma intrinsic(__readdr)
void __writedr(unsigned reg, unsigned __int64 value);
#pragma intrinsic(__writedr)
#else
unsigned long __readcr0(void);
unsigned long __readcr2(void);
unsigned long __readcr3(void);
//unsigned long __readcr4(void);
//#pragma intrinsic(__readcr4)
// HACK: the MSVC x86 __readcr4 intrinsic is broken, so route calls to an
// out-of-line replacement instead.
unsigned long ___readcr4(void);
#define __readcr4 ___readcr4

unsigned int __readdr(unsigned int reg);
void __writedr(unsigned reg, unsigned int value);
#endif

void __invlpg(void * Address);
#pragma intrinsic(__invlpg)

#ifdef _M_IX86
// The __invlpg intrinsic is broken and generates wrong opcodes when
// optimization is enabled, so replace it with an inline-assembly version.
#pragma warning(push)
#pragma warning(disable:4711)
void __forceinline __invlpg_fixed(void * Address)
{
    /* Keep the compiler from moving memory accesses across the TLB flush */
    _ReadWriteBarrier();
    __asm
    {
        mov eax, Address
        invlpg [eax]
    }
    _ReadWriteBarrier();
}
#pragma warning(pop)
#define __invlpg __invlpg_fixed
#endif

/*** System operations ***/
unsigned __int64 __readmsr(int reg);
#pragma intrinsic(__readmsr)
void __writemsr(unsigned long Register, unsigned __int64 Value);
#pragma intrinsic(__writemsr)
unsigned __int64 __readpmc(int counter);
#pragma intrinsic(__readpmc)
unsigned long __segmentlimit(unsigned long a);
#pragma intrinsic(__segmentlimit)
void __wbinvd(void);
#pragma intrinsic(__wbinvd)
void __lidt(void *Source);
#pragma intrinsic(__lidt)
void __sidt(void *Destination);
#pragma intrinsic(__sidt)
void _mm_pause(void);
#pragma intrinsic(_mm_pause)
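
/*
 * Usage sketch (illustrative only, not part of this header): _mm_pause emits
 * the pause instruction, which relaxes the CPU inside tight spin loops and
 * is the conventional companion of the interlocked primitives above:
 *
 *   while (_InterlockedCompareExchange(&lock, 1, 0) != 0)   // lock is a
 *   {                                                       // hypothetical
 *       _mm_pause();                                        // volatile long
 *   }
 */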

#ifdef __cplusplus
}
#endif

#endif /* KJK_INTRIN_H_ */

/* EOF */