/* Defines common preprocessor and assembly macros for use by various stubs.
   Copyright (C) 2016-2020 Free Software Foundation, Inc.
   Contributed by Daniel Santos <daniel.santos@pobox.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.

You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
<http://www.gnu.org/licenses/>.  */

#ifndef I386_ASM_H
#define I386_ASM_H

#include "auto-target.h"

#ifdef __GCC_HAVE_DWARF2_CFI_ASM
# define cfi_startproc()		.cfi_startproc
# define cfi_endproc()			.cfi_endproc
# define cfi_adjust_cfa_offset(X)	.cfi_adjust_cfa_offset X
# define cfi_def_cfa_register(X)	.cfi_def_cfa_register X
# define cfi_def_cfa(R,O)		.cfi_def_cfa R, O
# define cfi_register(D,S)		.cfi_register D, S
# define cfi_offset(R,O)		.cfi_offset R, O
# ifdef __x86_64__
#  define cfi_push(X)		.cfi_adjust_cfa_offset 8; .cfi_rel_offset X, 0
#  define cfi_pop(X)		.cfi_adjust_cfa_offset -8; .cfi_restore X
# else
#  define cfi_push(X)		.cfi_adjust_cfa_offset 4; .cfi_rel_offset X, 0
#  define cfi_pop(X)		.cfi_adjust_cfa_offset -4; .cfi_restore X
# endif
#else
# define cfi_startproc()
# define cfi_endproc()
# define cfi_adjust_cfa_offset(X)
# define cfi_def_cfa_register(X)
# define cfi_def_cfa(R,O)
# define cfi_register(D,S)
# define cfi_offset(R,O)
# define cfi_push(X)
# define cfi_pop(X)
#endif
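
/* A minimal usage sketch of the CFI macros (illustrative only; the label
   and register here are placeholders, not code defined by this file):

	cfi_startproc()
	push	%rbp
	cfi_push(%rbp)
	...
	pop	%rbp
	cfi_pop(%rbp)
	ret
	cfi_endproc()

   When the assembler lacks .cfi_* support the macros expand to nothing,
   so the same source assembles either way.  */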

#define PASTE2(a, b) PASTE2a(a, b)
#define PASTE2a(a, b) a ## b
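/* PASTE2 concatenates its arguments after they have been macro-expanded;
   e.g. PASTE2(__avx_, foo) yields the single token __avx_foo (foo being a
   placeholder).  The two-level definition is what forces the arguments to
   be expanded before pasting.  */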

/* These macros currently support GNU/Linux, Solaris and Darwin.  */

#ifdef __ELF__
# define FN_TYPE(fn) .type fn,@function
# define FN_SIZE(fn) .size fn,.-fn
# ifdef AS_HIDDEN_DIRECTIVE
#  define FN_HIDDEN(fn) AS_HIDDEN_DIRECTIVE fn
# endif
#else
# define FN_TYPE(fn)
# define FN_SIZE(fn)
#endif

#ifndef FN_HIDDEN
# define FN_HIDDEN(fn)
#endif

#ifdef __USER_LABEL_PREFIX__
# define ASMNAME(name)		PASTE2(__USER_LABEL_PREFIX__, name)
#else
# define ASMNAME(name)		name
#endif
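
/* For instance, on targets whose __USER_LABEL_PREFIX__ is an underscore
   (Darwin, for example) ASMNAME(foo) expands to _foo; elsewhere it is
   simply foo.  foo is a placeholder name used only for illustration.  */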

#define FUNC_BEGIN(fn)		\
	.globl ASMNAME(fn);	\
	FN_TYPE(ASMNAME(fn));	\
ASMNAME(fn):

#define HIDDEN_FUNC(fn)		\
	.globl ASMNAME(fn);	\
	FN_TYPE(ASMNAME(fn));	\
	FN_HIDDEN(ASMNAME(fn));	\
ASMNAME(fn):

#define FUNC_END(fn) FN_SIZE(ASMNAME(fn))
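
/* Illustrative only: wrapping a trivial function in these macros, e.g.

	FUNC_BEGIN(example)
		cfi_startproc()
		ret
		cfi_endproc()
	FUNC_END(example)

   emits a global label (with any user label prefix applied) plus the ELF
   .type/.size annotations where available.  "example" is a placeholder,
   not a function provided by this header.  */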

#ifdef MS2SYSV_STUB_AVX
# define MS2SYSV_STUB_PREFIX __avx_
# ifdef HAVE_AS_AVX
#  define MOVAPS vmovaps
# endif
#elif defined(MS2SYSV_STUB_SSE)
# define MS2SYSV_STUB_PREFIX __sse_
# define MOVAPS movaps
#endif

#ifdef MS2SYSV_STUB_PREFIX

# define MS2SYSV_STUB_BEGIN(base_name) \
	HIDDEN_FUNC(PASTE2(MS2SYSV_STUB_PREFIX, base_name))

# define MS2SYSV_STUB_END(base_name) \
	FUNC_END(PASTE2(MS2SYSV_STUB_PREFIX, base_name))
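
/* Illustrative only: when a stub file is assembled with MS2SYSV_STUB_AVX
   defined,

	MS2SYSV_STUB_BEGIN(example_stub)
		...
	MS2SYSV_STUB_END(example_stub)

   defines the hidden global symbol __avx_example_stub (with the SSE
   variant it would be __sse_example_stub).  example_stub is a placeholder
   name, not one of the stubs built from this header.  */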

/* This branch is used when expanding for SSE, or for AVX when the
   assembler supports the vmovaps mnemonic.  */
# ifdef MOVAPS
/* Save SSE registers 6-15 using rax as the base address.  */
#  define SSE_SAVE		   \
	MOVAPS %xmm15,-0x30(%rax); \
	MOVAPS %xmm14,-0x20(%rax); \
	MOVAPS %xmm13,-0x10(%rax); \
	MOVAPS %xmm12,     (%rax); \
	MOVAPS %xmm11, 0x10(%rax); \
	MOVAPS %xmm10, 0x20(%rax); \
	MOVAPS %xmm9,  0x30(%rax); \
	MOVAPS %xmm8,  0x40(%rax); \
	MOVAPS %xmm7,  0x50(%rax); \
	MOVAPS %xmm6,  0x60(%rax)

/* Restore SSE registers 6-15 using rsi as the base address.  */
#  define SSE_RESTORE		    \
	MOVAPS -0x30(%rsi), %xmm15; \
	MOVAPS -0x20(%rsi), %xmm14; \
	MOVAPS -0x10(%rsi), %xmm13; \
	MOVAPS      (%rsi), %xmm12; \
	MOVAPS  0x10(%rsi), %xmm11; \
	MOVAPS  0x20(%rsi), %xmm10; \
	MOVAPS  0x30(%rsi), %xmm9 ; \
	MOVAPS  0x40(%rsi), %xmm8 ; \
	MOVAPS  0x50(%rsi), %xmm7 ; \
	MOVAPS  0x60(%rsi), %xmm6
# else /* MOVAPS */
/* Reached only when expanding for AVX without assembler support
   (HAVE_AS_AVX undefined): emit the machine code for the vmovaps
   instructions above directly.  */
#  define SSE_SAVE							     \
	.byte 0xc5, 0x78, 0x29, 0x78, 0xd0; /* vmovaps %xmm15,-0x30(%rax) */ \
	.byte 0xc5, 0x78, 0x29, 0x70, 0xe0; /* vmovaps %xmm14,-0x20(%rax) */ \
	.byte 0xc5, 0x78, 0x29, 0x68, 0xf0; /* vmovaps %xmm13,-0x10(%rax) */ \
	.byte 0xc5, 0x78, 0x29, 0x20;       /* vmovaps %xmm12,     (%rax) */ \
	.byte 0xc5, 0x78, 0x29, 0x58, 0x10; /* vmovaps %xmm11, 0x10(%rax) */ \
	.byte 0xc5, 0x78, 0x29, 0x50, 0x20; /* vmovaps %xmm10, 0x20(%rax) */ \
	.byte 0xc5, 0x78, 0x29, 0x48, 0x30; /* vmovaps %xmm9,  0x30(%rax) */ \
	.byte 0xc5, 0x78, 0x29, 0x40, 0x40; /* vmovaps %xmm8,  0x40(%rax) */ \
	.byte 0xc5, 0xf8, 0x29, 0x78, 0x50; /* vmovaps %xmm7,  0x50(%rax) */ \
	.byte 0xc5, 0xf8, 0x29, 0x70, 0x60; /* vmovaps %xmm6,  0x60(%rax) */
#  define SSE_RESTORE							     \
	.byte 0xc5, 0x78, 0x28, 0x7e, 0xd0; /* vmovaps -0x30(%rsi),%xmm15 */ \
	.byte 0xc5, 0x78, 0x28, 0x76, 0xe0; /* vmovaps -0x20(%rsi),%xmm14 */ \
	.byte 0xc5, 0x78, 0x28, 0x6e, 0xf0; /* vmovaps -0x10(%rsi),%xmm13 */ \
	.byte 0xc5, 0x78, 0x28, 0x26;       /* vmovaps      (%rsi),%xmm12 */ \
	.byte 0xc5, 0x78, 0x28, 0x5e, 0x10; /* vmovaps  0x10(%rsi),%xmm11 */ \
	.byte 0xc5, 0x78, 0x28, 0x56, 0x20; /* vmovaps  0x20(%rsi),%xmm10 */ \
	.byte 0xc5, 0x78, 0x28, 0x4e, 0x30; /* vmovaps  0x30(%rsi),%xmm9  */ \
	.byte 0xc5, 0x78, 0x28, 0x46, 0x40; /* vmovaps  0x40(%rsi),%xmm8  */ \
	.byte 0xc5, 0xf8, 0x28, 0x7e, 0x50; /* vmovaps  0x50(%rsi),%xmm7  */ \
	.byte 0xc5, 0xf8, 0x28, 0x76, 0x60; /* vmovaps  0x60(%rsi),%xmm6  */
# endif /* MOVAPS */
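
/* Layout note (descriptive only): both variants use ten contiguous
   16-byte slots, with %xmm12 stored at the base register itself,
   %xmm13-%xmm15 below it (down to -0x30) and %xmm6-%xmm11 above it
   (up to 0x60), so a save/restore pair covers [base-0x30, base+0x70).  */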
#endif /* MS2SYSV_STUB_PREFIX */
#endif /* I386_ASM_H */