xref: /reactos/sdk/include/vcruntime/msc/intrin.h (revision cc3672cb)
1 
2 #ifdef __cplusplus
3 extern "C" {
4 #endif
5 
6 /*** Stack frame juggling ***/
/* "#pragma intrinsic" forces MSVC to expand the named routine inline
   instead of emitting a call to a like-named CRT library function. */
7 #pragma intrinsic(_ReturnAddress)
8 #pragma intrinsic(_AddressOfReturnAddress)
/* __getcallerseflags (reads the caller's EFLAGS) exists only on x86/x64 */
9 #if defined(_M_IX86) || defined(_M_AMD64)
10 #pragma intrinsic(__getcallerseflags)
11 #endif
12 
13 /*** Memory barriers ***/
/* Compiler-only barriers: they constrain optimizer reordering of memory
   accesses but emit no machine instruction. */
14 #pragma intrinsic(_ReadWriteBarrier)
15 #pragma intrinsic(_ReadBarrier)
16 #pragma intrinsic(_WriteBarrier)
/* x86/x64 hardware fences (MFENCE/LFENCE/SFENCE) */
17 #if defined(_M_IX86) || defined(_M_AMD64)
18 #pragma intrinsic(_mm_mfence)
19 #pragma intrinsic(_mm_lfence)
20 #pragma intrinsic(_mm_sfence)
21 #endif
22 #if defined(_M_AMD64)
23 #pragma intrinsic(__faststorefence)
/* ARM: __iso_volatile_* perform ISO-semantics volatile loads/stores of
   each width (unaffected by the /volatile:ms compiler switch) */
24 #elif defined(_M_ARM)
25 #pragma intrinsic(__iso_volatile_load16)
26 #pragma intrinsic(__iso_volatile_load32)
27 #pragma intrinsic(__iso_volatile_load64)
28 #pragma intrinsic(__iso_volatile_load8)
29 #pragma intrinsic(__iso_volatile_store16)
30 #pragma intrinsic(__iso_volatile_store32)
31 #pragma intrinsic(__iso_volatile_store64)
32 #pragma intrinsic(__iso_volatile_store8)
33 #endif
34 
35 /*** Atomic operations ***/
/* Atomic read-modify-write operations: plain names are the 32-bit forms,
   the 8/16/64 suffixes select the operand width. */
36 #pragma intrinsic(_InterlockedCompareExchange)
37 #pragma intrinsic(_InterlockedCompareExchange8)
38 #pragma intrinsic(_InterlockedCompareExchange16)
39 #pragma intrinsic(_InterlockedCompareExchange64)
40 #pragma intrinsic(_InterlockedExchange)
41 #pragma intrinsic(_InterlockedExchange8)
42 #pragma intrinsic(_InterlockedExchange16)
43 #pragma intrinsic(_InterlockedExchangeAdd)
44 #pragma intrinsic(_InterlockedExchangeAdd8)
45 #pragma intrinsic(_InterlockedExchangeAdd16)
46 #pragma intrinsic(_InterlockedAnd8)
47 #pragma intrinsic(_InterlockedAnd16)
48 #pragma intrinsic(_InterlockedAnd)
49 #pragma intrinsic(_InterlockedOr8)
50 #pragma intrinsic(_InterlockedOr16)
51 #pragma intrinsic(_InterlockedOr)
52 #pragma intrinsic(_InterlockedXor8)
53 #pragma intrinsic(_InterlockedXor16)
54 #pragma intrinsic(_InterlockedXor)
55 #pragma intrinsic(_InterlockedDecrement)
56 #pragma intrinsic(_InterlockedIncrement)
57 #pragma intrinsic(_InterlockedDecrement16)
58 #pragma intrinsic(_InterlockedIncrement16)
59 #pragma intrinsic(_interlockedbittestandreset)
60 #pragma intrinsic(_interlockedbittestandset)
/* Architecture-specific extras: x86 only adds the LargeStatistic add;
   AMD64 adds 64/128-bit and pointer-sized forms plus the _np-suffixed
   variants (NOTE(review): per MSVC docs _np means "no prefetch" --
   confirm exact semantics before relying on them). */
61 #if defined(_M_IX86)
62 #pragma intrinsic(_InterlockedAddLargeStatistic)
63 #elif defined(_M_AMD64)
64 #pragma intrinsic(_InterlockedExchange64)
65 #pragma intrinsic(_InterlockedExchangeAdd64)
66 #pragma intrinsic(_InterlockedCompareExchangePointer)
67 #pragma intrinsic(_InterlockedExchangePointer)
68 #pragma intrinsic(_InterlockedCompareExchange128)
69 #pragma intrinsic(_InterlockedAnd64)
70 #pragma intrinsic(_InterlockedOr64)
71 #pragma intrinsic(_InterlockedDecrement64)
72 #pragma intrinsic(_InterlockedIncrement64)
73 #pragma intrinsic(_interlockedbittestandreset64)
74 #pragma intrinsic(_interlockedbittestandset64)
75 #pragma intrinsic(_InterlockedAnd_np)
76 #pragma intrinsic(_InterlockedAnd8_np)
77 #pragma intrinsic(_InterlockedAnd16_np)
78 #pragma intrinsic(_InterlockedAnd64_np)
79 #pragma intrinsic(_InterlockedCompareExchange16_np)
80 #pragma intrinsic(_InterlockedCompareExchange64_np)
81 #pragma intrinsic(_InterlockedCompareExchange128_np)
82 #pragma intrinsic(_InterlockedCompareExchangePointer_np)
83 #pragma intrinsic(_InterlockedCompareExchange_np)
84 #pragma intrinsic(_InterlockedOr16_np)
85 #pragma intrinsic(_InterlockedOr8_np)
86 #pragma intrinsic(_InterlockedOr_np)
87 #pragma intrinsic(_InterlockedXor16_np)
88 #pragma intrinsic(_InterlockedXor64_np)
89 #pragma intrinsic(_InterlockedXor8_np)
90 #pragma intrinsic(_InterlockedXor_np)
91 #pragma intrinsic(_InterlockedOr64_np)
/* empty branch/block below: placeholders, no extra atomics forced yet */
92 #elif defined(_M_ARM)
93 
94 #endif
95 
96 #if defined(_M_AMD64) || defined(_M_ARM)
97 #endif
98 
98 
99 /*** String operations ***/
/* REP STOS / REP MOVS wrappers; the qword forms exist only on x64 */
100 #if defined(_M_IX86) || defined(_M_AMD64)
101 #pragma intrinsic(__stosb)
102 #pragma intrinsic(__stosw)
103 #pragma intrinsic(__stosd)
104 #pragma intrinsic(__movsb)
105 #pragma intrinsic(__movsw)
106 #pragma intrinsic(__movsd)
107 #endif
108 #ifdef _M_AMD64
109 #pragma intrinsic(__stosq)
110 #pragma intrinsic(__movsq)
111 #endif
112 
113 /*** GS segment addressing ***/
/* x64 only: read/write/inc/add at a byte offset from the GS base (which
   on Windows x64 addresses the TEB in user mode / KPCR in kernel mode). */
114 #if defined(_M_AMD64)
115 #pragma intrinsic(__writegsbyte)
116 #pragma intrinsic(__writegsword)
117 #pragma intrinsic(__writegsdword)
118 #pragma intrinsic(__writegsqword)
119 #pragma intrinsic(__readgsbyte)
120 #pragma intrinsic(__readgsword)
121 #pragma intrinsic(__readgsdword)
122 #pragma intrinsic(__readgsqword)
123 #pragma intrinsic(__incgsbyte)
124 #pragma intrinsic(__incgsword)
125 #pragma intrinsic(__incgsdword)
126 #pragma intrinsic(__incgsqword)
127 #pragma intrinsic(__addgsbyte)
128 #pragma intrinsic(__addgsword)
129 #pragma intrinsic(__addgsdword)
130 #pragma intrinsic(__addgsqword)
131 #endif
132 
133 /*** FS segment addressing ***/
/* x86 only: read/write/inc/add at a byte offset from the FS base (which
   on Windows x86 addresses the TEB in user mode / KPCR in kernel mode).
   A duplicated "#pragma intrinsic(__writefsdword)" line was removed. */
134 #if defined(_M_IX86)
135 #pragma intrinsic(__writefsbyte)
136 #pragma intrinsic(__writefsword)
137 #pragma intrinsic(__writefsdword)
139 #pragma intrinsic(__readfsbyte)
140 #pragma intrinsic(__readfsword)
141 #pragma intrinsic(__readfsdword)
142 #pragma intrinsic(__incfsbyte)
143 #pragma intrinsic(__incfsword)
144 #pragma intrinsic(__incfsdword)
145 #pragma intrinsic(__addfsbyte)
146 #pragma intrinsic(__addfsword)
147 #pragma intrinsic(__addfsdword)
148 #endif
149 
150 /*** Bit manipulation ***/
151 #pragma intrinsic(_BitScanForward)
152 #pragma intrinsic(_BitScanReverse)
153 #ifdef _WIN64
154 #pragma intrinsic(_BitScanForward64)
155 #pragma intrinsic(_BitScanReverse64)
156 #endif
157 #pragma intrinsic(_bittest)
158 #pragma intrinsic(_bittestandcomplement)
159 #pragma intrinsic(_bittestandreset)
160 #pragma intrinsic(_bittestandset)
/* rotate and byte-swap helpers are available on every target */
161 #pragma intrinsic(_rotl8)
162 #pragma intrinsic(_rotl16)
163 #pragma intrinsic(_rotl)
164 #pragma intrinsic(_rotl64)
165 #pragma intrinsic(_lrotl)
166 #pragma intrinsic(_rotr8)
167 #pragma intrinsic(_rotr16)
168 #pragma intrinsic(_rotr)
169 #pragma intrinsic(_rotr64)
170 #pragma intrinsic(_lrotr)
171 #pragma intrinsic(_byteswap_ushort)
172 #pragma intrinsic(_byteswap_ulong)
173 #pragma intrinsic(_byteswap_uint64)
/* NOTE(review): __lzcnt/__popcnt emit LZCNT/POPCNT, which need CPU
   support at run time even though the pragma is always accepted --
   callers are expected to check CPUID first. */
174 #if defined(_M_IX86) || defined(_M_AMD64)
175 #pragma intrinsic(__ll_lshift)
176 #pragma intrinsic(__ll_rshift)
177 #pragma intrinsic(__ull_rshift)
178 #pragma intrinsic(__lzcnt)
179 #pragma intrinsic(__lzcnt16)
180 #pragma intrinsic(__popcnt)
181 #pragma intrinsic(__popcnt16)
182 #endif
183 #ifdef _M_AMD64
184 #pragma intrinsic(__shiftleft128)
185 #pragma intrinsic(__shiftright128)
186 #pragma intrinsic(_bittest64)
187 #pragma intrinsic(_bittestandcomplement64)
188 #pragma intrinsic(_bittestandreset64)
189 #pragma intrinsic(_bittestandset64)
190 #pragma intrinsic(__lzcnt64)
191 #pragma intrinsic(__popcnt64)
/* empty branch: placeholder, no ARM-specific bit intrinsics forced yet */
192 #elif defined(_M_ARM)
193 
194 #endif
195 
196 /*** 64/128-bit math ***/
197 #pragma intrinsic(_abs64)
/* x86/x64: 32x32 -> 64-bit signed/unsigned multiplies */
198 #if defined(_M_IX86) || defined(_M_AMD64)
199 #pragma intrinsic(__emul)
200 #pragma intrinsic(__emulu)
201 #endif
/* AMD64: 64x64 multiplies returning the high half or both halves;
   ARM: high-half signed/unsigned multiplies */
202 #ifdef _M_AMD64
203 #pragma intrinsic(__mulh)
204 #pragma intrinsic(__umulh)
205 #pragma intrinsic(_mul128)
206 #pragma intrinsic(_umul128)
207 #elif defined(_M_ARM)
208 #pragma intrinsic(_MulHigh)
209 #pragma intrinsic(_MulUnsignedHigh)
210 #endif
211 
212 /** Floating point stuff **/
/* ARM only: unordered-compare predicates and raw bit-pattern copies
   between float/double and the same-width integer types */
213 #if defined(_M_ARM)
214 #pragma intrinsic(_isunordered)
215 #pragma intrinsic(_isunorderedf)
216 #pragma intrinsic(_CopyDoubleFromInt64)
217 #pragma intrinsic(_CopyFloatFromInt32)
218 #pragma intrinsic(_CopyInt32FromFloat)
219 #pragma intrinsic(_CopyInt64FromDouble)
220 #endif
221 
222 /*** Port I/O ***/
/* IN/OUT instruction wrappers: single-value and string (REP INS/OUTS)
   forms; both the underscore-prefixed and the plain legacy inp/outp
   names are forced intrinsic. */
223 #if defined(_M_IX86) || defined(_M_AMD64)
224 #pragma intrinsic(__inbyte)
225 #pragma intrinsic(__inword)
226 #pragma intrinsic(__indword)
227 #pragma intrinsic(__inbytestring)
228 #pragma intrinsic(__inwordstring)
229 #pragma intrinsic(__indwordstring)
230 #pragma intrinsic(__outbyte)
231 #pragma intrinsic(__outword)
232 #pragma intrinsic(__outdword)
233 #pragma intrinsic(__outbytestring)
234 #pragma intrinsic(__outwordstring)
235 #pragma intrinsic(__outdwordstring)
236 #pragma intrinsic(_inp)
237 #pragma intrinsic(_inpd)
238 #pragma intrinsic(_inpw)
239 #pragma intrinsic(inp)
240 #pragma intrinsic(inpd)
241 #pragma intrinsic(inpw)
242 #pragma intrinsic(_outp)
243 #pragma intrinsic(_outpd)
244 #pragma intrinsic(_outpw)
245 #pragma intrinsic(outp)
246 #pragma intrinsic(outpd)
247 #pragma intrinsic(outpw)
248 #endif
249 
250 /*** System information ***/
251 #if defined(_M_IX86) || defined(_M_AMD64)
252 #pragma intrinsic(__cpuid)
253 #pragma intrinsic(__cpuidex)
/* __rdtscp additionally stores the IA32_TSC_AUX value through its
   out parameter */
254 #pragma intrinsic(__rdtsc)
255 #pragma intrinsic(__rdtscp)
256 #pragma intrinsic(__writeeflags)
257 #pragma intrinsic(__readeflags)
258 #endif
259 
260 /*** Interrupts and traps ***/
261 #pragma intrinsic(__debugbreak)
262 #pragma intrinsic(_disable)
263 #pragma intrinsic(_enable)
264 #if defined(_M_IX86) || defined(_M_AMD64)
265 #pragma intrinsic(__int2c)
266 #pragma intrinsic(__halt)
267 #pragma intrinsic(__ud2)
/* VS2012 (_MSC_VER 1700) introduced the __fastfail intrinsic; older
   compilers get a fallback below. */
268 #if (_MSC_VER >= 1700)
269 #pragma intrinsic(__fastfail)
270 #else
271 #if defined(_M_IX86)
/* Fallback: raise the Windows fast-fail interrupt (int 0x29) with the
   failure code in ECX, matching what the real intrinsic emits. */
272 __declspec(noreturn) __forceinline
273 void __fastfail(unsigned int Code)
274 {
275     __asm
276     {
277         mov ecx, Code
278         int 29h
279     }
280 }
281 #else
/* non-x86: prototype only -- presumably implemented elsewhere
   (e.g. in an .asm file); TODO confirm */
282 void __fastfail(unsigned int Code);
283 #endif // defined(_M_IX86)
284 #endif
285 #endif
/* empty block: placeholder for ARM trap intrinsics */
286 #if defined(_M_ARM)
287 #endif
288 
289 /*** Protected memory management ***/
290 #if defined(_M_IX86) || defined(_M_AMD64)
291 #pragma intrinsic(__writecr0)
292 #pragma intrinsic(__writecr3)
293 #pragma intrinsic(__writecr4)
294 #pragma intrinsic(__writecr8)
295 #endif
296 #if defined(_M_IX86)
297 #pragma intrinsic(__readcr0)
298 #pragma intrinsic(__readcr2)
299 #pragma intrinsic(__readcr3)
300 //#pragma intrinsic(__readcr4)
301 // HACK: MSVC is broken
/* The intrinsic form of __readcr4 is avoided; calls are rerouted via
   the #define to ___readcr4, presumably implemented in a separate
   assembly file -- verify against the build. */
302 unsigned long __cdecl  ___readcr4(void);
303 #define __readcr4 ___readcr4
/* NOTE(review): MSVC documents the CR8 intrinsics as x64-only; confirm
   this pragma is actually honored for x86 builds. */
304 #pragma intrinsic(__readcr8)
305 #pragma intrinsic(__readdr)
306 #pragma intrinsic(__writedr)
307 // This intrinsic is broken and generates wrong opcodes,
308 // when optimization is enabled!
/* C4711 is the informational "function selected for automatic inline
   expansion" warning; silenced around the forced-inline replacement. */
309 #pragma warning(push)
310 #pragma warning(disable:4711)
/* Replacement for __invlpg: the compiler barriers keep the optimizer
   from moving memory accesses across the TLB-entry flush, and the
   inline asm issues INVLPG on the page containing Address. */
311 void  __forceinline __invlpg_fixed(void * Address)
312 {
313     _ReadWriteBarrier();
314    __asm
315    {
316        mov eax, Address
317        invlpg [eax]
318    }
319     _ReadWriteBarrier();
320 }
321 #pragma warning(pop)
322 #define __invlpg __invlpg_fixed
323 #elif defined(_M_AMD64)
324 #pragma intrinsic(__invlpg)
325 #pragma intrinsic(__readcr0)
326 #pragma intrinsic(__readcr2)
327 #pragma intrinsic(__readcr3)
328 #pragma intrinsic(__readcr4)
329 #pragma intrinsic(__readcr8)
330 #pragma intrinsic(__readdr)
331 #pragma intrinsic(__writedr)
332 #elif defined(_M_ARM)
333 #pragma intrinsic(__prefetch)
334 #endif
335 
336 /*** System operations ***/
337 #if defined(_M_IX86) || defined(_M_AMD64)
338 #pragma intrinsic(__readmsr)
339 #pragma intrinsic(__writemsr)
340 #pragma intrinsic(__readpmc)
341 #pragma intrinsic(__segmentlimit)
342 #pragma intrinsic(__wbinvd)
343 #pragma intrinsic(__lidt)
344 #pragma intrinsic(__sidt)
/* VS2013 (_MSC_VER 1800) introduced the _sgdt intrinsic; older
   compilers get a fallback below. */
345 #if (_MSC_VER >= 1800)
346 #pragma intrinsic(_sgdt)
347 #else
348 #if defined(_M_IX86)
/* Fallback: SGDT stores the 6-byte GDTR (limit + base) at Destination. */
349 __forceinline
350 void _sgdt(void *Destination)
351 {
352     __asm
353     {
354         mov eax, Destination
355         sgdt [eax]
356     }
357 }
358 #else
/* non-x86: prototype only -- presumably implemented elsewhere
   (e.g. in an .asm file); TODO confirm */
359 void _sgdt(void *Destination);
360 #endif // defined(_M_IX86)
361 #endif
362 #pragma intrinsic(_mm_pause)
363 #endif
/* ARM: coprocessor moves, status-register access, and hint/yield
   instructions */
364 #if defined(_M_ARM)
365 #pragma intrinsic(_MoveFromCoprocessor)
366 #pragma intrinsic(_MoveFromCoprocessor2)
367 #pragma intrinsic(_MoveFromCoprocessor64)
368 #pragma intrinsic(_MoveToCoprocessor)
369 #pragma intrinsic(_MoveToCoprocessor2)
370 #pragma intrinsic(_MoveToCoprocessor64)
371 #pragma intrinsic(_ReadStatusReg)
372 #pragma intrinsic(_WriteStatusReg)
373 #pragma intrinsic(__yield)
374 #pragma intrinsic(__wfe)
375 #pragma intrinsic(__wfi)
376 #pragma intrinsic(__swi)
377 #pragma intrinsic(__hvc)
378 #pragma intrinsic(__ldrexd)
379 #pragma intrinsic(__rdpmccntr64)
380 #pragma intrinsic(__sev)
381 #endif
382 
383 /** Secure virtual machine **/
/* AMD SVM instruction wrappers
   (CLGI/INVLPGA/SKINIT/STGI/VMLOAD/VMRUN/VMSAVE) */
384 #if defined(_M_IX86) || defined(_M_AMD64)
385 #pragma intrinsic(__svm_clgi)
386 #pragma intrinsic(__svm_invlpga)
387 #pragma intrinsic(__svm_skinit)
388 #pragma intrinsic(__svm_stgi)
389 #pragma intrinsic(__svm_vmload)
390 #pragma intrinsic(__svm_vmrun)
391 #pragma intrinsic(__svm_vmsave)
392 #endif
393 
394 /** Virtual machine extension **/
/* empty blocks: placeholders, no Intel VMX intrinsics forced yet */
395 #if defined(_M_IX86) || defined(_M_AMD64)
396 
397 #endif
398 #if defined(_M_AMD64)
399 
400 #endif
401 
402 /** Misc **/
403 #pragma intrinsic(__nop)
/* __code_seg was added in VS2012 (_MSC_VER 1700) */
404 #if (_MSC_VER >= 1700)
405 #pragma intrinsic(__code_seg)
406 #endif
/* ARM: saturating add/subtract (single- and double-word) plus raw
   instruction emission and a compile-time assertion helper */
407 #ifdef _M_ARM
408 #pragma intrinsic(_AddSatInt)
409 #pragma intrinsic(_DAddSatInt)
410 #pragma intrinsic(_DSubSatInt)
411 #pragma intrinsic(_SubSatInt)
412 #pragma intrinsic(__emit)
413 #pragma intrinsic(__static_assert)
414 #endif
415 
416 #ifdef __cplusplus
417 }
418 #endif
419 
420 /* EOF */
421