1 /*
2  * amd64-codegen.h: Macros for generating amd64 code
3  *
4  * Authors:
5  *   Paolo Molaro (lupus@ximian.com)
6  *   Intel Corporation (ORP Project)
7  *   Sergey Chaban (serge@wildwestsoftware.com)
8  *   Dietmar Maurer (dietmar@ximian.com)
9  *   Patrik Torstensson
10  *   Zalman Stern
11  *
12  * Copyright (C)  2000 Intel Corporation.  All rights reserved.
13  * Copyright (C)  2001, 2002 Ximian, Inc.
14  * Licensed under the MIT license. See LICENSE file in the project root for full license information.
15  */
16 
17 #ifndef AMD64_H
18 #define AMD64_H
19 
20 // Conventions in this file:
21 
// body: implementation; other macros call this one
// disp: displacement
// inst: instruction
// is_half: 16-bit (short) if true, 8-bit (byte) if false
// imm: immediate
// mem: absolute address, encoded as a 32-bit immediate
// membase: address in a base register plus a displacement
// memindex: SIB addressing: (base register) + ((index register) << shift) + displacement
// reg: register operand; encoded with modR/M mod bits 11 (register direct)
// regp: register used as a pointer; encoded with modR/M mod bits 00 (memory at the address in the register)
// size: expected to be 1, 2, 4 or 8
// widen: sign- or zero-extends from 1 or 2 bytes
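//
// For example, a typical emission sequence looks like this (a minimal
// sketch: assumes `buf` points into a writable, executable buffer, and
// X86_ADD comes from the included x86-codegen.h):
//
//   unsigned char *code = buf;
//   amd64_mov_reg_membase (code, AMD64_RAX, AMD64_RBP, 16, 8); /* mov rax, [rbp+0x10] */
//   amd64_alu_reg_imm (code, X86_ADD, AMD64_RAX, 1);           /* add rax, 1 */
//   amd64_mov_membase_reg (code, AMD64_RBP, 16, AMD64_RAX, 8); /* mov [rbp+0x10], rax */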
34 
35 #include <glib.h>
36 
37 typedef enum {
38 	AMD64_RAX = 0,
39 	AMD64_RCX = 1,
40 	AMD64_RDX = 2,
41 	AMD64_RBX = 3,
42 	AMD64_RSP = 4,
43 	AMD64_RBP = 5,
44 	AMD64_RSI = 6,
45 	AMD64_RDI = 7,
46 	AMD64_R8 = 8,
47 	AMD64_R9 = 9,
48 	AMD64_R10 = 10,
49 	AMD64_R11 = 11,
50 	AMD64_R12 = 12,
51 	AMD64_R13 = 13,
52 	AMD64_R14 = 14,
53 	AMD64_R15 = 15,
54 	AMD64_RIP = 16,
55 	AMD64_NREG
56 } AMD64_Reg_No;
57 
58 typedef enum {
59 	AMD64_XMM0 = 0,
60 	AMD64_XMM1 = 1,
61 	AMD64_XMM2 = 2,
62 	AMD64_XMM3 = 3,
63 	AMD64_XMM4 = 4,
64 	AMD64_XMM5 = 5,
65 	AMD64_XMM6 = 6,
66 	AMD64_XMM7 = 7,
67 	AMD64_XMM8 = 8,
68 	AMD64_XMM9 = 9,
69 	AMD64_XMM10 = 10,
70 	AMD64_XMM11 = 11,
71 	AMD64_XMM12 = 12,
72 	AMD64_XMM13 = 13,
73 	AMD64_XMM14 = 14,
74 	AMD64_XMM15 = 15,
75 	AMD64_XMM_NREG = 16,
76 } AMD64_XMM_Reg_No;
77 
78 typedef enum
79 {
80   AMD64_REX_B = 1, /* The register in r/m field, base register in SIB byte, or reg in opcode is 8-15 rather than 0-7 */
81   AMD64_REX_X = 2, /* The index register in SIB byte is 8-15 rather than 0-7 */
82   AMD64_REX_R = 4, /* The reg field of ModRM byte is 8-15 rather than 0-7 */
  AMD64_REX_W = 8  /* Operation is 64 bits instead of 32 (default) or 16 (with 0x66 prefix) */
84 } AMD64_REX_Bits;
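
/* Example: `mov r8, rax` encodes as 0x49 0x89 0xc0 -- REX.W|REX.B (0x49)
 * selects the 64-bit operand size and extends the ModRM r/m field to reach R8. */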
85 
86 #define amd64_codegen_pre(inst)
87 #define amd64_codegen_post(inst)
88 
89 #ifdef TARGET_WIN32
90 #define AMD64_ARG_REG1 AMD64_RCX
91 #define AMD64_ARG_REG2 AMD64_RDX
92 #define AMD64_ARG_REG3 AMD64_R8
93 #define AMD64_ARG_REG4 AMD64_R9
94 #else
95 #define AMD64_ARG_REG1 AMD64_RDI
96 #define AMD64_ARG_REG2 AMD64_RSI
97 #define AMD64_ARG_REG3 AMD64_RDX
98 #define AMD64_ARG_REG4 AMD64_RCX
99 #endif
100 
101 #ifdef TARGET_WIN32
102 #define AMD64_CALLEE_REGS ((1<<AMD64_RAX) | (1<<AMD64_RCX) | (1<<AMD64_RDX) | (1<<AMD64_R8) | (1<<AMD64_R9) | (1<<AMD64_R10))
103 #define AMD64_IS_CALLEE_REG(reg)  (AMD64_CALLEE_REGS & (1 << (reg)))
104 
105 #define AMD64_ARGUMENT_REGS ((1<<AMD64_RDX) | (1<<AMD64_RCX) | (1<<AMD64_R8) | (1<<AMD64_R9))
106 #define AMD64_IS_ARGUMENT_REG(reg) (AMD64_ARGUMENT_REGS & (1 << (reg)))
107 
108 /* xmm0-xmm3 for standard calling convention, additionally xmm4-xmm5 for __vectorcall (not currently used) */
109 #define AMD64_ARGUMENT_XREGS ((1<<AMD64_XMM0) | (1<<AMD64_XMM1) | (1<<AMD64_XMM2) | (1<<AMD64_XMM3) | (1<<AMD64_XMM4) | (1<<AMD64_XMM5))
110 #define AMD64_IS_ARGUMENT_XREG(reg) (AMD64_ARGUMENT_XREGS & (1 << (reg)))
111 
112 #define AMD64_CALLEE_SAVED_REGS ((1<<AMD64_RDI) | (1<<AMD64_RSI) | (1<<AMD64_RBX) | (1<<AMD64_R12) | (1<<AMD64_R13) | (1<<AMD64_R14) | (1<<AMD64_R15) | (1<<AMD64_RBP))
113 #define AMD64_IS_CALLEE_SAVED_REG(reg) (AMD64_CALLEE_SAVED_REGS & (1 << (reg)))
114 #else
115 #define AMD64_CALLEE_REGS ((1<<AMD64_RAX) | (1<<AMD64_RCX) | (1<<AMD64_RDX) | (1<<AMD64_RSI) | (1<<AMD64_RDI) | (1<<AMD64_R8) | (1<<AMD64_R9) | (1<<AMD64_R10))
116 #define AMD64_IS_CALLEE_REG(reg)  (AMD64_CALLEE_REGS & (1 << (reg)))
117 
118 #define AMD64_ARGUMENT_REGS ((1<<AMD64_RDI) | (1<<AMD64_RSI) | (1<<AMD64_RDX) | (1<<AMD64_RCX) | (1<<AMD64_R8) | (1<<AMD64_R9))
119 #define AMD64_IS_ARGUMENT_REG(reg) (AMD64_ARGUMENT_REGS & (1 << (reg)))
120 
121 #define AMD64_ARGUMENT_XREGS ((1<<AMD64_XMM0) | (1<<AMD64_XMM1) | (1<<AMD64_XMM2) | (1<<AMD64_XMM3) | (1<<AMD64_XMM4) | (1<<AMD64_XMM5) | (1<<AMD64_XMM6) | (1<<AMD64_XMM7))
122 #define AMD64_IS_ARGUMENT_XREG(reg) (AMD64_ARGUMENT_XREGS & (1 << (reg)))
123 
124 #define AMD64_CALLEE_SAVED_REGS ((1<<AMD64_RBX) | (1<<AMD64_R12) | (1<<AMD64_R13) | (1<<AMD64_R14) | (1<<AMD64_R15) | (1<<AMD64_RBP))
125 #define AMD64_IS_CALLEE_SAVED_REG(reg) (AMD64_CALLEE_SAVED_REGS & (1 << (reg)))
126 #endif
127 
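/* Emit a REX prefix when one is needed: REX.W selects 64-bit operand size,
 * and R/X/B extend the ModRM reg field, the SIB index, and the
 * ModRM r/m / SIB base / opcode register field to reach registers 8-15.
 * A REX byte is also emitted for 1-byte operands so that SPL/BPL/SIL/DIL
 * are encodable instead of AH/CH/DH/BH. */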
128 #define AMD64_REX(bits) ((unsigned char)(0x40 | (bits)))
129 #define amd64_emit_rex(inst, width, reg_modrm, reg_index, reg_rm_base_opcode) do \
130 	{ \
131 		unsigned char _amd64_rex_bits = \
132 			(((width) > 4) ? AMD64_REX_W : 0) | \
133 			(((reg_modrm) > 7) ? AMD64_REX_R : 0) | \
134 			(((reg_index) > 7) ? AMD64_REX_X : 0) | \
135 			(((reg_rm_base_opcode) > 7) ? AMD64_REX_B : 0); \
136 		if ((_amd64_rex_bits != 0) || (((width) == 1))) *(inst)++ = AMD64_REX(_amd64_rex_bits); \
137 	} while (0)
138 
139 typedef union {
140 	guint64 val;
141 	unsigned char b [8];
142 } amd64_imm_buf;
143 
144 #include "../x86/x86-codegen.h"
145 
146 /* In 64 bit mode, all registers have a low byte subregister */
147 #undef X86_IS_BYTE_REG
148 #define X86_IS_BYTE_REG(reg) 1
149 
150 #define amd64_modrm_mod(modrm) ((modrm) >> 6)
151 #define amd64_modrm_reg(modrm) (((modrm) >> 3) & 0x7)
152 #define amd64_modrm_rm(modrm) ((modrm) & 0x7)
153 
154 #define amd64_rex_r(rex) ((((rex) >> 2) & 0x1) << 3)
155 #define amd64_rex_x(rex) ((((rex) >> 1) & 0x1) << 3)
156 #define amd64_rex_b(rex) ((((rex) >> 0) & 0x1) << 3)
157 
158 #define amd64_sib_scale(sib) ((sib) >> 6)
159 #define amd64_sib_index(sib) (((sib) >> 3) & 0x7)
160 #define amd64_sib_base(sib) ((sib) & 0x7)
161 
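/* True when the value is representable as a sign-extended 32-bit immediate,
 * e.g. to choose between a rel32 branch and an indirect 64-bit jump in
 * amd64_jump_code_size below. */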
#define amd64_is_imm32(val) ((gint64)(val) >= -((gint64)1<<31) && (gint64)(val) <= (((gint64)1<<31)-1))
163 
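/* Emit a 64-bit immediate into the instruction stream byte by byte,
 * little-endian, through the amd64_imm_buf union above. */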
164 #define x86_imm_emit64(inst,imm)     \
165 	do {	\
166 			amd64_imm_buf imb; 	\
167 			imb.val = (guint64) (imm);	\
168 			*(inst)++ = imb.b [0];	\
169 			*(inst)++ = imb.b [1];	\
170 			*(inst)++ = imb.b [2];	\
171 			*(inst)++ = imb.b [3];	\
172 			*(inst)++ = imb.b [4];	\
173 			*(inst)++ = imb.b [5];	\
174 			*(inst)++ = imb.b [6];	\
175 			*(inst)++ = imb.b [7];	\
176 	} while (0)
177 
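/* Emit the ModRM (+ SIB/displacement) bytes for a membase operand. In
 * 64-bit mode, mod=00 with r/m=101 means RIP-relative disp32, which is why
 * AMD64_RIP gets its own case here. */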
#define amd64_membase_emit(inst,reg,basereg,disp) do { \
	if ((basereg) == AMD64_RIP) { \
		x86_address_byte ((inst), 0, (reg)&0x7, 5); \
		x86_imm_emit32 ((inst), (disp)); \
	} else { \
		x86_membase_emit ((inst), (reg)&0x7, (basereg)&0x7, (disp)); \
	} \
} while (0)
186 
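/* ALU-with-immediate has three encodings: 0x83 /opc with a sign-extended
 * imm8, the short accumulator form ((opc << 3) + 5, e.g. 0x05 for add) for
 * RAX with an imm32, and the general 0x81 /opc form with an imm32. The
 * reg-reg body below uses ((opc) << 3) + 3, the r64, r/m64 direction of the
 * same ALU opcode group. */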
187 #define amd64_alu_reg_imm_size_body(inst,opc,reg,imm,size) \
188 	do {	\
189 		if (x86_is_imm8((imm))) {	\
190 			amd64_emit_rex(inst, size, 0, 0, (reg)); \
191 			*(inst)++ = (unsigned char)0x83;	\
192 			x86_reg_emit ((inst), (opc), (reg));	\
193 			x86_imm_emit8 ((inst), (imm));	\
194 		} else if ((reg) == AMD64_RAX) {	\
195 			amd64_emit_rex(inst, size, 0, 0, 0); \
196 			*(inst)++ = (((unsigned char)(opc)) << 3) + 5;	\
197 			x86_imm_emit32 ((inst), (imm));	\
198 		} else {	\
199 			amd64_emit_rex(inst, size, 0, 0, (reg)); \
200 			*(inst)++ = (unsigned char)0x81;	\
201 			x86_reg_emit ((inst), (opc), (reg));	\
202 			x86_imm_emit32 ((inst), (imm));	\
203 		}	\
204 	} while (0)
205 
206 #define amd64_alu_reg_reg_size_body(inst,opc,dreg,reg,size)	\
207 	do {	\
208 		amd64_emit_rex(inst, size, (dreg), 0, (reg)); \
209 		*(inst)++ = (((unsigned char)(opc)) << 3) + 3;	\
210 		x86_reg_emit ((inst), (dreg), (reg));	\
211 	} while (0)
212 
213 #define amd64_test_reg_imm_size_body(inst,reg,imm,size) \
214 	do { \
215 		amd64_codegen_pre(inst); \
216 		amd64_emit_rex ((inst),(size),0,0,(reg)); \
217 		if ((reg) == AMD64_RAX) { \
218 			*(inst)++ = (unsigned char)0xa9; \
219 		} \
220 		else { \
221 			*(inst)++ = (unsigned char)0xf7;	\
222 			x86_reg_emit((inst), 0, (reg));	\
223 		} \
224 		x86_imm_emit32((inst), (imm));	\
225 		amd64_codegen_post(inst); \
226 	} while (0)
227 
228 #define amd64_alu_reg_imm_size(inst,opc,reg,imm,size) \
229 	amd64_alu_reg_imm_size_body((inst), (opc), (reg), (imm), (size))
230 
231 #define amd64_alu_reg_reg_size(inst,opc,dreg,reg,size) \
232 		amd64_alu_reg_reg_size_body((inst), (opc), (dreg), (reg), (size))
233 
234 #define amd64_test_reg_imm_size(inst, reg, imm, size) \
235 		amd64_test_reg_imm_size_body(inst, reg, imm, size)
236 
237 #define amd64_alu_reg_imm(inst,opc,reg,imm) amd64_alu_reg_imm_size((inst),(opc),(reg),(imm),8)
238 
239 #define amd64_alu_reg_reg(inst,opc,dreg,reg) amd64_alu_reg_reg_size ((inst),(opc),(dreg),(reg),8)
240 
241 #define amd64_test_reg_imm(inst,reg,imm) amd64_test_reg_imm_size(inst,reg,imm,8)
242 
243 #define amd64_alu_reg_membase_size(inst,opc,reg,basereg,disp,size) \
244 	do { \
245 		amd64_codegen_pre(inst);						  \
246 		amd64_emit_rex ((inst),(size),(reg),0,(basereg)); \
247 		*(inst)++ = (((unsigned char)(opc)) << 3) + 3;	\
248 		amd64_membase_emit (inst, reg, basereg, disp); \
249 		amd64_codegen_post(inst);					   \
250 } while (0)
251 
252 #define amd64_mov_regp_reg(inst,regp,reg,size)	\
253 	do {	\
254 		amd64_codegen_pre(inst); \
255 		if ((size) == 2) \
256 			x86_prefix((inst), X86_OPERAND_PREFIX); \
257 		amd64_emit_rex(inst, (size), (reg), 0, (regp)); \
258 		switch ((size)) {	\
259 		case 1: *(inst)++ = (unsigned char)0x88; break;	\
260 		case 2: case 4: case 8: *(inst)++ = (unsigned char)0x89; break;	\
261 		default: assert (0);	\
262 		}	\
263 		x86_regp_emit ((inst), (reg), (regp));	\
264 		amd64_codegen_post(inst); \
265 	} while (0)
266 
267 #define amd64_mov_membase_reg(inst,basereg,disp,reg,size)	\
268 	do {	\
269 		amd64_codegen_pre(inst); \
270 		if ((size) == 2) \
271 			x86_prefix((inst), X86_OPERAND_PREFIX); \
272 		amd64_emit_rex(inst, (size), (reg), 0, (basereg)); \
273 		switch ((size)) {	\
274 		case 1: *(inst)++ = (unsigned char)0x88; break;	\
275 		case 2: case 4: case 8: *(inst)++ = (unsigned char)0x89; break;	\
276 		default: assert (0);	\
277 		}	\
278 		x86_membase_emit ((inst), ((reg)&0x7), ((basereg)&0x7), (disp));	\
279 		amd64_codegen_post(inst); \
280 	} while (0)
281 
282 #define amd64_mov_mem_reg(inst,mem,reg,size)	\
283 	do {	\
284 		amd64_codegen_pre(inst); \
285 		if ((size) == 2) \
286 			x86_prefix((inst), X86_OPERAND_PREFIX); \
287 		amd64_emit_rex(inst, (size), (reg), 0, 0); \
288 		switch ((size)) {	\
289 		case 1: *(inst)++ = (unsigned char)0x88; break;	\
290 		case 2: case 4: case 8: *(inst)++ = (unsigned char)0x89; break;	\
291 		default: assert (0);	\
292 		}	\
293 		x86_address_byte ((inst), 0, (reg), 4); \
294 		x86_address_byte ((inst), 0, 4, 5); \
295 		x86_imm_emit32 ((inst), (mem)); \
296 		amd64_codegen_post(inst); \
297 	} while (0)
298 
299 #define amd64_mov_reg_reg(inst,dreg,reg,size)	\
300 	do {	\
301 		amd64_codegen_pre(inst); \
302 		if ((size) == 2) \
303 			x86_prefix((inst), X86_OPERAND_PREFIX); \
304 		amd64_emit_rex(inst, (size), (dreg), 0, (reg)); \
305 		switch ((size)) {	\
306 		case 1: *(inst)++ = (unsigned char)0x8a; break;	\
307 		case 2: case 4: case 8: *(inst)++ = (unsigned char)0x8b; break;	\
308 		default: assert (0);	\
309 		}	\
310 		x86_reg_emit ((inst), (dreg), (reg));	\
311 		amd64_codegen_post(inst); \
312 	} while (0)
313 
314 #define amd64_mov_reg_mem_body(inst,reg,mem,size)	\
315 	do {	\
316 		amd64_codegen_pre(inst); \
317 		if ((size) == 2) \
318 			x86_prefix((inst), X86_OPERAND_PREFIX); \
319 		amd64_emit_rex(inst, (size), (reg), 0, 0); \
320 		switch ((size)) {	\
321 		case 1: *(inst)++ = (unsigned char)0x8a; break;	\
322 		case 2: case 4: case 8: *(inst)++ = (unsigned char)0x8b; break;	\
323 		default: assert (0);	\
324 		}	\
325 		x86_address_byte ((inst), 0, (reg), 4); \
326 		x86_address_byte ((inst), 0, 4, 5); \
327 		x86_imm_emit32 ((inst), (mem)); \
328 		amd64_codegen_post(inst); \
329 	} while (0)
330 
331 #define amd64_mov_reg_mem(inst,reg,mem,size)	\
332 	do {    \
333 		amd64_mov_reg_mem_body((inst),(reg),(mem),(size)); \
334 	} while (0)
335 
336 #define amd64_mov_reg_membase_body(inst,reg,basereg,disp,size)	\
337 	do {	\
338 		if ((size) == 2) \
339 			x86_prefix((inst), X86_OPERAND_PREFIX); \
340 		amd64_emit_rex(inst, (size), (reg), 0, (basereg)); \
341 		switch ((size)) {	\
342 		case 1: *(inst)++ = (unsigned char)0x8a; break;	\
343 		case 2: case 4: case 8: *(inst)++ = (unsigned char)0x8b; break;	\
344 		default: assert (0);	\
345 		}	\
346 		amd64_membase_emit ((inst), (reg), (basereg), (disp));	\
347 	} while (0)
348 
349 #define amd64_mov_reg_memindex_size_body(inst,reg,basereg,disp,indexreg,shift,size) \
350 	do { \
351 		amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); \
352 		x86_mov_reg_memindex((inst),((reg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(size) == 8 ? 4 : (size)); \
353 	} while (0)
354 
355 #define amd64_mov_reg_memindex_size(inst,reg,basereg,disp,indexreg,shift,size) \
356 	amd64_mov_reg_memindex_size_body((inst),(reg),(basereg),(disp),(indexreg),(shift),(size))
357 #define amd64_mov_reg_membase(inst,reg,basereg,disp,size)	\
358 	do {	\
359 		amd64_mov_reg_membase_body((inst), (reg), (basereg), (disp), (size)); \
360 	} while (0)
361 
362 #define amd64_movzx_reg_membase(inst,reg,basereg,disp,size)	\
363 	do {	\
364 		amd64_codegen_pre(inst); \
365 		amd64_emit_rex(inst, (size), (reg), 0, (basereg)); \
366 		switch ((size)) {	\
367 		case 1: *(inst)++ = (unsigned char)0x0f; *(inst)++ = (unsigned char)0xb6; break;	\
368 		case 2: *(inst)++ = (unsigned char)0x0f; *(inst)++ = (unsigned char)0xb7; break;	\
369 		case 4: case 8: *(inst)++ = (unsigned char)0x8b; break;	\
370 		default: assert (0);	\
371 		}	\
372 		x86_membase_emit ((inst), ((reg)&0x7), ((basereg)&0x7), (disp));	\
373 		amd64_codegen_post(inst); \
374 	} while (0)
375 
376 #define amd64_movsxd_reg_mem(inst,reg,mem) \
377     do {     \
378 	amd64_codegen_pre(inst); \
379 	amd64_emit_rex(inst,8,(reg),0,0); \
380 	*(inst)++ = (unsigned char)0x63; \
381 	x86_mem_emit ((inst), ((reg)&0x7), (mem)); \
382 	amd64_codegen_post(inst); \
383     } while (0)
384 
385 #define amd64_movsxd_reg_membase(inst,reg,basereg,disp) \
386     do {     \
387 	amd64_codegen_pre(inst); \
388 	amd64_emit_rex(inst,8,(reg),0,(basereg)); \
389 	*(inst)++ = (unsigned char)0x63; \
390 	x86_membase_emit ((inst), ((reg)&0x7), ((basereg)&0x7), (disp)); \
391 	amd64_codegen_post(inst); \
392     } while (0)
393 
394 #define amd64_movsxd_reg_reg(inst,dreg,reg) \
395     do {     \
396 	amd64_codegen_pre(inst); \
397 	amd64_emit_rex(inst,8,(dreg),0,(reg)); \
398 	*(inst)++ = (unsigned char)0x63; \
399 	x86_reg_emit ((inst), (dreg), (reg));	\
400 	amd64_codegen_post(inst); \
401     } while (0)
402 
/* mov reg, imm is pretty much the only instruction that supports a 64-bit immediate.
 * Optimize for the common case of a 32-bit immediate. Pepper with casts to avoid warnings.
 */
406 #define amd64_mov_reg_imm_size(inst,reg,imm,size)	\
407 	do {	\
408 		amd64_codegen_pre(inst); \
409 		amd64_emit_rex(inst, (size), 0, 0, (reg)); \
410 		*(inst)++ = (unsigned char)0xb8 + ((reg) & 0x7);	\
411 		if ((size) == 8) \
412 			x86_imm_emit64 ((inst), (guint64)(imm));	\
413 		else \
414 			x86_imm_emit32 ((inst), (int)(guint64)(imm));	\
415 		amd64_codegen_post(inst); \
416 	} while (0)
417 
418 #define amd64_mov_reg_imm(inst,reg,imm)	\
419 	do {	\
		int _amd64_width_temp = ((guint64)(imm) == (guint64)(guint32)(guint64)(imm)); \
421 		amd64_codegen_pre(inst); \
422 		amd64_mov_reg_imm_size ((inst), (reg), (imm), (_amd64_width_temp ? 4 : 8)); \
423 		amd64_codegen_post(inst); \
424 	} while (0)
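
/* The 4-byte form (0xb8+reg, imm32) zero-extends into the 64-bit register,
 * so it is chosen only when the value round-trips through 32 bits. For
 * example, amd64_mov_reg_imm (code, AMD64_RAX, 0x12345) emits the 5-byte
 * `mov eax, 0x12345`, while a full 64-bit pointer takes the 10-byte
 * REX.W + 0xb8 (movabs) form. */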
425 
426 #define amd64_set_reg_template(inst,reg) amd64_mov_reg_imm_size ((inst),(reg), 0, 8)
427 
428 #define amd64_set_template(inst,reg) amd64_set_reg_template((inst),(reg))
429 
430 #define amd64_mov_membase_imm(inst,basereg,disp,imm,size)	\
431 	do {	\
432 		amd64_codegen_pre(inst); \
433 		if ((size) == 2) \
434 			x86_prefix((inst), X86_OPERAND_PREFIX); \
435 		amd64_emit_rex(inst, (size) == 1 ? 0 : (size), 0, 0, (basereg)); \
436 		if ((size) == 1) {	\
437 			*(inst)++ = (unsigned char)0xc6;	\
438 			x86_membase_emit ((inst), 0, (basereg) & 0x7, (disp));	\
439 			x86_imm_emit8 ((inst), (imm));	\
440 		} else if ((size) == 2) {	\
441 			*(inst)++ = (unsigned char)0xc7;	\
442 			x86_membase_emit ((inst), 0, (basereg) & 0x7, (disp));	\
443 			x86_imm_emit16 ((inst), (imm));	\
444 		} else {	\
445 			*(inst)++ = (unsigned char)0xc7;	\
446 			x86_membase_emit ((inst), 0, (basereg) & 0x7, (disp));	\
447 			x86_imm_emit32 ((inst), (imm));	\
448 		}	\
449 		amd64_codegen_post(inst); \
450 	} while (0)
451 
452 
453 #define amd64_lea_membase_body(inst,reg,basereg,disp)	\
454 	do {	\
455 		amd64_emit_rex(inst, 8, (reg), 0, (basereg)); \
456 		*(inst)++ = (unsigned char)0x8d;	\
457 		amd64_membase_emit ((inst), (reg), (basereg), (disp));	\
458 	} while (0)
459 
460 #define amd64_lea_membase(inst,reg,basereg,disp) \
461 	amd64_lea_membase_body((inst), (reg), (basereg), (disp))
462 
/* The instruction is implicitly 64-bit, so don't generate REX just for the size. */
464 #define amd64_push_reg(inst,reg)	\
465 	do {	\
466 		amd64_codegen_pre(inst); \
467 		amd64_emit_rex(inst, 0, 0, 0, (reg)); \
468 		*(inst)++ = (unsigned char)0x50 + ((reg) & 0x7);	\
469 		amd64_codegen_post(inst); \
470 	} while (0)
471 
/* The instruction is implicitly 64-bit, so don't generate REX just for the size. */
473 #define amd64_push_membase(inst,basereg,disp)	\
474 	do {	\
475 		amd64_codegen_pre(inst); \
476 		amd64_emit_rex(inst, 0, 0, 0, (basereg)); \
477 		*(inst)++ = (unsigned char)0xff;	\
478 		x86_membase_emit ((inst), 6, (basereg) & 0x7, (disp));	\
479 		amd64_codegen_post(inst); \
480 	} while (0)
481 
482 #define amd64_pop_reg_body(inst,reg)	\
483 	do {	\
484 		amd64_codegen_pre(inst);  \
485 		amd64_emit_rex(inst, 0, 0, 0, (reg)); \
486 		*(inst)++ = (unsigned char)0x58 + ((reg) & 0x7);	\
487 		amd64_codegen_post(inst);  \
488 	} while (0)
489 
490 #define amd64_call_reg(inst,reg)	\
491 	do {	\
492 		amd64_emit_rex(inst, 0, 0, 0, (reg)); \
493 		*(inst)++ = (unsigned char)0xff;	\
494 		x86_reg_emit ((inst), 2, ((reg) & 0x7));	\
495 	} while (0)
496 
497 
498 #define amd64_ret(inst) do { *(inst)++ = (unsigned char)0xc3; } while (0)
499 #define amd64_leave(inst) do { *(inst)++ = (unsigned char)0xc9; } while (0)
500 
501 #define amd64_pop_reg(inst,reg) amd64_pop_reg_body((inst), (reg))
502 
503 #define amd64_movsd_reg_regp(inst,reg,regp)	\
504 	do {	\
505 		amd64_codegen_pre(inst); \
506 		x86_prefix((inst), 0xf2); \
507 		amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
508 		*(inst)++ = (unsigned char)0x0f;	\
509 		*(inst)++ = (unsigned char)0x10;	\
510 		x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7);	\
511 		amd64_codegen_post(inst); \
512 	} while (0)
513 
514 #define amd64_movsd_regp_reg(inst,regp,reg)	\
515 	do {	\
516 		amd64_codegen_pre(inst); \
517 		x86_prefix((inst), 0xf2); \
518 		amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
519 		*(inst)++ = (unsigned char)0x0f;	\
520 		*(inst)++ = (unsigned char)0x11;	\
521 		x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7);	\
522 		amd64_codegen_post(inst); \
523 	} while (0)
524 
525 #define amd64_movss_reg_regp(inst,reg,regp)	\
526 	do {	\
527 		amd64_codegen_pre(inst); \
528 		x86_prefix((inst), 0xf3); \
529 		amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
530 		*(inst)++ = (unsigned char)0x0f;	\
531 		*(inst)++ = (unsigned char)0x10;	\
532 		x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7);	\
533 		amd64_codegen_post(inst); \
534 	} while (0)
535 
536 #define amd64_movss_regp_reg(inst,regp,reg)	\
537 	do {	\
538 		amd64_codegen_pre(inst); \
539 		x86_prefix((inst), 0xf3); \
540 		amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
541 		*(inst)++ = (unsigned char)0x0f;	\
542 		*(inst)++ = (unsigned char)0x11;	\
543 		x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7);	\
544 		amd64_codegen_post(inst); \
545 	} while (0)
546 
547 #define amd64_movdqu_reg_membase(inst,reg,basereg,disp)	\
548 	do {	\
549 		amd64_codegen_pre(inst); \
550 		x86_prefix((inst), 0xf3); \
551 		amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
552 		*(inst)++ = (unsigned char)0x0f;	\
553 		*(inst)++ = (unsigned char)0x6f;	\
554 		x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp));	\
555 		amd64_codegen_post(inst); \
556 	} while (0)
557 
558 #define amd64_movsd_reg_membase(inst,reg,basereg,disp)	\
559 	do {	\
560 		amd64_codegen_pre(inst); \
561 		x86_prefix((inst), 0xf2); \
562 		amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
563 		*(inst)++ = (unsigned char)0x0f;	\
564 		*(inst)++ = (unsigned char)0x10;	\
565 		x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp));	\
566 		amd64_codegen_post(inst); \
567 	} while (0)
568 
569 #define amd64_movss_reg_membase(inst,reg,basereg,disp)	\
570 	do {	\
571 		amd64_codegen_pre(inst); \
572 		x86_prefix((inst), 0xf3); \
573 		amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
574 		*(inst)++ = (unsigned char)0x0f;	\
575 		*(inst)++ = (unsigned char)0x10;	\
576 		x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp));	\
577 		amd64_codegen_post(inst); \
578 	} while (0)
579 
580 #define amd64_movdqu_membase_reg(inst,basereg,disp,reg)	\
581 	do {	\
582 		amd64_codegen_pre(inst); \
583 		x86_prefix((inst), 0xf3); \
584 		amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
585 		*(inst)++ = (unsigned char)0x0f;	\
586 		*(inst)++ = (unsigned char)0x7f;	\
587 		x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp));	\
588 		amd64_codegen_post(inst); \
589 	} while (0)
590 
591 #define amd64_movsd_membase_reg(inst,basereg,disp,reg)	\
592 	do {	\
593 		amd64_codegen_pre(inst); \
594 		x86_prefix((inst), 0xf2); \
595 		amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
596 		*(inst)++ = (unsigned char)0x0f;	\
597 		*(inst)++ = (unsigned char)0x11;	\
598 		x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp));	\
599 		amd64_codegen_post(inst); \
600 	} while (0)
601 
602 #define amd64_movss_membase_reg(inst,basereg,disp,reg)	\
603 	do {	\
604 		amd64_codegen_pre(inst); \
605 		x86_prefix((inst), 0xf3); \
606 		amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
607 		*(inst)++ = (unsigned char)0x0f;	\
608 		*(inst)++ = (unsigned char)0x11;	\
609 		x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp));	\
610 		amd64_codegen_post(inst); \
611 	} while (0)
612 
/* In 64-bit mode the single-byte inc/dec opcodes (0x40-0x4f) are repurposed
 * as REX prefixes, so inc/dec are encoded through 0xff /0 and 0xff /1 instead. */
614 #define amd64_inc_reg_size(inst,reg,size) \
615 	do { \
616 		amd64_codegen_pre(inst); \
617 		amd64_emit_rex ((inst),(size),0,0,(reg)); \
618 		*(inst)++ = (unsigned char)0xff; \
619 		x86_reg_emit ((inst),0,(reg) & 0x7); \
620 		amd64_codegen_post(inst); \
621 	} while (0)
622 
623 #define amd64_dec_reg_size(inst,reg,size) \
624 	do { \
625 		amd64_codegen_pre(inst); \
626 		amd64_emit_rex ((inst),(size),0,0,(reg)); \
627 		*(inst)++ = (unsigned char)0xff; \
628 		x86_reg_emit ((inst),1,(reg) & 0x7); \
629 		amd64_codegen_post(inst); \
630 	} while (0)
631 
632 #define amd64_fld_membase_size(inst,basereg,disp,is_double,size) do { \
633 	amd64_codegen_pre(inst); \
634 	amd64_emit_rex ((inst),0,0,0,(basereg)); \
635 	*(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9;	\
636 	amd64_membase_emit ((inst), 0, (basereg), (disp));	\
637 	amd64_codegen_post(inst); \
638 } while (0)
639 
640 /* From the AMD64 Software Optimization Manual */
#define amd64_padding_size(inst,size) \
	do { \
		switch ((size)) { \
		case 1: *(inst)++ = 0x90; break; \
		case 2: *(inst)++ = 0x66; *(inst)++ = 0x90; break; \
		case 3: *(inst)++ = 0x66; *(inst)++ = 0x66; *(inst)++ = 0x90; break; \
		default: amd64_emit_rex ((inst),8,0,0,0); x86_padding ((inst), (size) - 1); \
		} \
	} while (0)
650 
651 #define amd64_call_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); *(inst)++ = (unsigned char)0xff; amd64_membase_emit ((inst),2, (basereg),(disp)); } while (0)
652 #define amd64_jump_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); *(inst)++ = (unsigned char)0xff; amd64_membase_emit ((inst), 4, (basereg), (disp)); } while (0)
653 
654 #define amd64_jump_code_size(inst,target,size) do { \
655 	if (amd64_is_imm32 ((gint64)(target) - (gint64)(inst))) {		\
656 		x86_jump_code((inst),(target));									\
657 	} else {															\
658 	    amd64_jump_membase ((inst), AMD64_RIP, 0);							\
659 		*(guint64*)(inst) = (guint64)(target);							\
660 		(inst) += 8; \
661 	} \
662 } while (0)
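
/* When the target is out of rel32 range, the sequence above is an indirect
 * `jmp [rip+0]` through the 8 bytes that immediately follow the jump, i.e.
 * the absolute 64-bit target is embedded right after the instruction. */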
663 
664 /*
665  * SSE
666  */
667 
668 //TODO Reorganize SSE opcode defines.
669 
670 /* Two opcode SSE defines */
671 
672 #define emit_sse_reg_reg_op2_size(inst,dreg,reg,op1,op2,size) do { \
673     amd64_codegen_pre(inst); \
674     amd64_emit_rex ((inst), size, (dreg), 0, (reg)); \
675     *(inst)++ = (unsigned char)(op1); \
676     *(inst)++ = (unsigned char)(op2); \
677     x86_reg_emit ((inst), (dreg), (reg)); \
678     amd64_codegen_post(inst); \
679 } while (0)
680 
681 #define emit_sse_reg_reg_op2(inst,dreg,reg,op1,op2) emit_sse_reg_reg_op2_size ((inst), (dreg), (reg), (op1), (op2), 0)
682 
683 #define emit_sse_reg_reg_op2_imm(inst,dreg,reg,op1,op2,imm) do { \
684    amd64_codegen_pre(inst); \
685    emit_sse_reg_reg_op2 ((inst), (dreg), (reg), (op1), (op2)); \
686    x86_imm_emit8 ((inst), (imm)); \
687    amd64_codegen_post(inst); \
688 } while (0)
689 
690 #define emit_sse_membase_reg_op2(inst,basereg,disp,reg,op1,op2) do { \
691     amd64_codegen_pre(inst); \
692     amd64_emit_rex ((inst), 0, (reg), 0, (basereg)); \
693     *(inst)++ = (unsigned char)(op1); \
694     *(inst)++ = (unsigned char)(op2); \
695     amd64_membase_emit ((inst), (reg), (basereg), (disp)); \
696     amd64_codegen_post(inst); \
697 } while (0)
698 
699 #define emit_sse_reg_membase_op2(inst,dreg,basereg,disp,op1,op2) do { \
700     amd64_codegen_pre(inst); \
701     amd64_emit_rex ((inst), 0, (dreg), 0, (basereg) == AMD64_RIP ? 0 : (basereg)); \
702     *(inst)++ = (unsigned char)(op1); \
703     *(inst)++ = (unsigned char)(op2); \
704     amd64_membase_emit ((inst), (dreg), (basereg), (disp)); \
705     amd64_codegen_post(inst); \
706 } while (0)
707 
708 /* Three opcode SSE defines */
709 
710 #define emit_opcode3(inst,op1,op2,op3) do { \
711    *(inst)++ = (unsigned char)(op1); \
712    *(inst)++ = (unsigned char)(op2); \
713    *(inst)++ = (unsigned char)(op3); \
714 } while (0)
715 
716 #define emit_sse_reg_reg_size(inst,dreg,reg,op1,op2,op3,size) do { \
717     amd64_codegen_pre(inst); \
718     *(inst)++ = (unsigned char)(op1); \
719 	amd64_emit_rex ((inst), size, (dreg), 0, (reg)); \
720     *(inst)++ = (unsigned char)(op2); \
721     *(inst)++ = (unsigned char)(op3); \
722     x86_reg_emit ((inst), (dreg), (reg)); \
723     amd64_codegen_post(inst); \
724 } while (0)
725 
726 #define emit_sse_reg_reg(inst,dreg,reg,op1,op2,op3) emit_sse_reg_reg_size ((inst), (dreg), (reg), (op1), (op2), (op3), 0)
727 
728 #define emit_sse_reg_reg_imm(inst,dreg,reg,op1,op2,op3,imm) do { \
729    amd64_codegen_pre(inst); \
730    emit_sse_reg_reg ((inst), (dreg), (reg), (op1), (op2), (op3)); \
731    x86_imm_emit8 ((inst), (imm)); \
732    amd64_codegen_post(inst); \
733 } while (0)
734 
735 #define emit_sse_membase_reg(inst,basereg,disp,reg,op1,op2,op3) do { \
736     amd64_codegen_pre(inst); \
737     x86_prefix((inst), (unsigned char)(op1)); \
738     amd64_emit_rex ((inst), 0, (reg), 0, (basereg)); \
739     *(inst)++ = (unsigned char)(op2); \
740     *(inst)++ = (unsigned char)(op3); \
741     amd64_membase_emit ((inst), (reg), (basereg), (disp)); \
742     amd64_codegen_post(inst); \
743 } while (0)
744 
745 #define emit_sse_reg_membase(inst,dreg,basereg,disp,op1,op2,op3) do { \
746     amd64_codegen_pre(inst); \
747     x86_prefix((inst), (unsigned char)(op1)); \
748     amd64_emit_rex ((inst), 0, (dreg), 0, (basereg) == AMD64_RIP ? 0 : (basereg)); \
749     *(inst)++ = (unsigned char)(op2); \
750     *(inst)++ = (unsigned char)(op3); \
751     amd64_membase_emit ((inst), (dreg), (basereg), (disp)); \
752     amd64_codegen_post(inst); \
753 } while (0)
754 
755 /* Four opcode SSE defines */
756 
757 #define emit_sse_reg_reg_op4_size(inst,dreg,reg,op1,op2,op3,op4,size) do { \
758     amd64_codegen_pre(inst); \
759     x86_prefix((inst), (unsigned char)(op1)); \
760     amd64_emit_rex ((inst), size, (dreg), 0, (reg)); \
761     *(inst)++ = (unsigned char)(op2); \
762     *(inst)++ = (unsigned char)(op3); \
763     *(inst)++ = (unsigned char)(op4); \
764     x86_reg_emit ((inst), (dreg), (reg)); \
765     amd64_codegen_post(inst); \
766 } while (0)
767 
768 #define emit_sse_reg_reg_op4(inst,dreg,reg,op1,op2,op3,op4) emit_sse_reg_reg_op4_size ((inst), (dreg), (reg), (op1), (op2), (op3), (op4), 0)
769 
770 /* specific SSE opcode defines */
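
/* In these legacy SSE encodings the mandatory prefix selects the data type:
 * none = packed single (ps), 0x66 = packed double (pd) / packed integer,
 * 0xf3 = scalar single (ss), 0xf2 = scalar double (sd). */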
771 
772 #define amd64_sse_xorpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst),(dreg),(reg), 0x66, 0x0f, 0x57)
773 
774 #define amd64_sse_xorpd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst),(dreg),(basereg), (disp), 0x66, 0x0f, 0x57)
775 
776 #define amd64_sse_andpd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst),(dreg),(basereg), (disp), 0x66, 0x0f, 0x54)
777 
778 #define amd64_sse_movsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x10)
779 #define amd64_sse_movss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x10)
780 
781 #define amd64_sse_movsd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst), (dreg), (basereg), (disp), 0xf2, 0x0f, 0x10)
782 
783 #define amd64_sse_movsd_membase_reg(inst,basereg,disp,reg) emit_sse_membase_reg ((inst), (basereg), (disp), (reg), 0xf2, 0x0f, 0x11)
784 
785 #define amd64_sse_movss_membase_reg(inst,basereg,disp,reg) emit_sse_membase_reg ((inst), (basereg), (disp), (reg), 0xf3, 0x0f, 0x11)
786 
787 #define amd64_sse_movss_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst), (dreg), (basereg), (disp), 0xf3, 0x0f, 0x10)
788 
789 #define amd64_sse_comisd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst),(dreg),(reg),0x66,0x0f,0x2f)
#define amd64_sse_comiss_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2 ((inst),(dreg),(reg),0x0f,0x2f) /* comiss has no mandatory prefix */
791 
792 #define amd64_sse_comisd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst), (dreg), (basereg), (disp), 0x66, 0x0f, 0x2f)
793 
794 #define amd64_sse_ucomisd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst),(dreg),(reg),0x66,0x0f,0x2e)
795 
796 #define amd64_sse_cvtsd2si_reg_reg(inst,dreg,reg) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf2, 0x0f, 0x2d, 8)
797 #define amd64_sse_cvtss2si_reg_reg(inst,dreg,reg) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf3, 0x0f, 0x2d, 8)
798 
799 #define amd64_sse_cvttsd2si_reg_reg_size(inst,dreg,reg,size) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf2, 0x0f, 0x2c, (size))
#define amd64_sse_cvttss2si_reg_reg_size(inst,dreg,reg,size) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf3, 0x0f, 0x2c, (size))
801 
802 #define amd64_sse_cvttsd2si_reg_reg(inst,dreg,reg) amd64_sse_cvttsd2si_reg_reg_size ((inst), (dreg), (reg), 8)
803 
804 #define amd64_sse_cvtsi2sd_reg_reg_size(inst,dreg,reg,size) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf2, 0x0f, 0x2a, (size))
805 
806 #define amd64_sse_cvtsi2sd_reg_reg(inst,dreg,reg) amd64_sse_cvtsi2sd_reg_reg_size ((inst), (dreg), (reg), 8)
807 
808 #define amd64_sse_cvtsi2ss_reg_reg_size(inst,dreg,reg,size) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf3, 0x0f, 0x2a, (size))
809 
810 #define amd64_sse_cvtsi2ss_reg_reg(inst,dreg,reg) amd64_sse_cvtsi2ss_reg_reg_size ((inst), (dreg), (reg), 8)
811 
812 #define amd64_sse_cvtsd2ss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x5a)
813 
814 #define amd64_sse_cvtss2sd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x5a)
815 
816 #define amd64_sse_addsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x58)
817 #define amd64_sse_addss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x58)
818 
819 #define amd64_sse_subsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x5c)
820 #define amd64_sse_subss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x5c)
821 
822 #define amd64_sse_mulsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x59)
823 #define amd64_sse_mulss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x59)
824 
825 #define amd64_sse_divsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x5e)
826 #define amd64_sse_divss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x5e)
827 
828 #define amd64_sse_sqrtsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0x51)
829 
830 
831 #define amd64_sse_pinsrw_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm ((inst), (dreg), (reg), 0x66, 0x0f, 0xc4, (imm))
832 
833 #define amd64_sse_pextrw_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm ((inst), (dreg), (reg), 0x66, 0x0f, 0xc5, (imm))
834 
835 
836 #define amd64_sse_cvttsd2si_reg_xreg_size(inst,reg,xreg,size) emit_sse_reg_reg_size ((inst), (reg), (xreg), 0xf2, 0x0f, 0x2c, (size))
837 
838 
839 #define amd64_sse_addps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x58)
840 
841 #define amd64_sse_divps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5e)
842 
843 #define amd64_sse_mulps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x59)
844 
845 #define amd64_sse_subps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5c)
846 
847 #define amd64_sse_maxps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5f)
848 
849 #define amd64_sse_minps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5d)
850 
851 #define amd64_sse_cmpps_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_op2_imm((inst), (dreg), (reg), 0x0f, 0xc2, (imm))
852 
853 #define amd64_sse_andps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x54)
854 
855 #define amd64_sse_andnps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x55)
856 
857 #define amd64_sse_orps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x56)
858 
859 #define amd64_sse_xorps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x57)
860 
861 #define amd64_sse_sqrtps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x51)
862 
863 #define amd64_sse_rsqrtps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x52)
864 
865 #define amd64_sse_rcpps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x53)
866 
867 #define amd64_sse_addsubps_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0xd0)
868 
869 #define amd64_sse_haddps_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0x7c)
870 
871 #define amd64_sse_hsubps_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0x7d)
872 
873 #define amd64_sse_movshdup_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf3, 0x0f, 0x16)
874 
875 #define amd64_sse_movsldup_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf3, 0x0f, 0x12)
876 
877 
878 #define amd64_sse_pshufhw_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0xf3, 0x0f, 0x70, (imm))
879 
880 #define amd64_sse_pshuflw_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0xf2, 0x0f, 0x70, (imm))
881 
882 #define amd64_sse_pshufd_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0x66, 0x0f, 0x70, (imm))
883 
884 #define amd64_sse_shufps_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_op2_imm((inst), (dreg), (reg), 0x0f, 0xC6, (imm))
885 
886 #define amd64_sse_shufpd_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0x66, 0x0f, 0xC6, (imm))
887 
888 
889 #define amd64_sse_addpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x58)
890 
891 #define amd64_sse_divpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5e)
892 
893 #define amd64_sse_mulpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x59)
894 
895 #define amd64_sse_subpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5c)
896 
897 #define amd64_sse_maxpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5f)
898 
899 #define amd64_sse_minpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5d)
900 
901 #define amd64_sse_cmppd_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0x66, 0x0f, 0xc2, (imm))
902 
903 #define amd64_sse_andpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x54)
904 
905 #define amd64_sse_andnpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x55)
906 
907 #define amd64_sse_orpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x56)
908 
909 #define amd64_sse_sqrtpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x51)
910 
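/* Note: SSE defines no RSQRTPD/RCPPD; the 0x66-prefixed forms of 0x0f 0x52
 * and 0x0f 0x53 below are not valid instruction encodings. */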
911 #define amd64_sse_rsqrtpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x52)
912 
913 #define amd64_sse_rcppd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x53)
914 
915 #define amd64_sse_addsubpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd0)
916 
917 #define amd64_sse_haddpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x7c)
918 
919 #define amd64_sse_hsubpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x7d)
920 
921 #define amd64_sse_movddup_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0x12)
922 
923 
924 #define amd64_sse_pmovmskb_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd7)
925 
926 
927 #define amd64_sse_pand_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xdb)
928 
929 #define amd64_sse_por_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xeb)
930 
931 #define amd64_sse_pxor_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xef)
932 
933 
934 #define amd64_sse_paddb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfc)
935 
936 #define amd64_sse_paddw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfd)
937 
938 #define amd64_sse_paddd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfe)
939 
940 #define amd64_sse_paddq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd4)
941 
942 
943 #define amd64_sse_psubb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf8)
944 
945 #define amd64_sse_psubw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf9)
946 
947 #define amd64_sse_psubd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfa)
948 
949 #define amd64_sse_psubq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfb)
950 
951 
952 #define amd64_sse_pmaxub_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xde)
953 
954 #define amd64_sse_pmaxuw_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3e)
955 
956 #define amd64_sse_pmaxud_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3f)
957 
958 
959 #define amd64_sse_pmaxsb_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3c)
960 
961 #define amd64_sse_pmaxsw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xee)
962 
963 #define amd64_sse_pmaxsd_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3d)
964 
965 
966 #define amd64_sse_pavgb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe0)
967 
968 #define amd64_sse_pavgw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe3)
969 
970 
971 #define amd64_sse_pminub_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xda)
972 
973 #define amd64_sse_pminuw_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3a)
974 
975 #define amd64_sse_pminud_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3b)
976 
977 
978 #define amd64_sse_pminsb_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x38)
979 
980 #define amd64_sse_pminsw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xea)
981 
982 #define amd64_sse_pminsd_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x39)
983 
984 
985 #define amd64_sse_pcmpeqb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x74)
986 
987 #define amd64_sse_pcmpeqw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x75)
988 
989 #define amd64_sse_pcmpeqd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x76)
990 
991 #define amd64_sse_pcmpeqq_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x29)
992 
993 
994 #define amd64_sse_pcmpgtb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x64)
995 
996 #define amd64_sse_pcmpgtw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x65)
997 
998 #define amd64_sse_pcmpgtd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x66)
999 
1000 #define amd64_sse_pcmpgtq_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x37)
1001 
1002 
1003 #define amd64_sse_psadbw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf6)
1004 
1005 
1006 #define amd64_sse_punpcklbw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x60)
1007 
1008 #define amd64_sse_punpcklwd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x61)
1009 
1010 #define amd64_sse_punpckldq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x62)
1011 
1012 #define amd64_sse_punpcklqdq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x6c)
1013 
1014 #define amd64_sse_unpcklpd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x14)
1015 
1016 #define amd64_sse_unpcklps_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x14)
1017 
1018 
1019 #define amd64_sse_punpckhbw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x68)
1020 
1021 #define amd64_sse_punpckhwd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x69)
1022 
1023 #define amd64_sse_punpckhdq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x6a)
1024 
1025 #define amd64_sse_punpckhqdq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x6d)
1026 
1027 #define amd64_sse_unpckhpd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x15)
1028 
1029 #define amd64_sse_unpckhps_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x15)
1030 
1031 
1032 #define amd64_sse_packsswb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x63)
1033 
1034 #define amd64_sse_packssdw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x6b)
1035 
1036 #define amd64_sse_packuswb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x67)
1037 
1038 #define amd64_sse_packusdw_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x2b)
1039 
1040 
1041 #define amd64_sse_paddusb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xdc)
1042 
1043 #define amd64_sse_psubusb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd8)
1044 
1045 #define amd64_sse_paddusw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xdd)
1046 
#define amd64_sse_psubusw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd9)
1048 
1049 
1050 #define amd64_sse_paddsb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xec)
1051 
1052 #define amd64_sse_psubsb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe8)
1053 
1054 #define amd64_sse_paddsw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xed)
1055 
1056 #define amd64_sse_psubsw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe9)
1057 
1058 
1059 #define amd64_sse_pmullw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd5)
1060 
1061 #define amd64_sse_pmulld_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x40)
1062 
1063 #define amd64_sse_pmuludq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf4)
1064 
1065 #define amd64_sse_pmulhuw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe4)
1066 
1067 #define amd64_sse_pmulhw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe5)
1068 
1069 
1070 #define amd64_sse_psrlw_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHR, (reg), 0x66, 0x0f, 0x71, (imm))
1071 
1072 #define amd64_sse_psrlw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd1)
1073 
1074 
1075 #define amd64_sse_psraw_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SAR, (reg), 0x66, 0x0f, 0x71, (imm))
1076 
1077 #define amd64_sse_psraw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe1)
1078 
1079 
1080 #define amd64_sse_psllw_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHL, (reg), 0x66, 0x0f, 0x71, (imm))
1081 
1082 #define amd64_sse_psllw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf1)
1083 
1084 
1085 #define amd64_sse_psrld_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHR, (reg), 0x66, 0x0f, 0x72, (imm))
1086 
1087 #define amd64_sse_psrld_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd2)
1088 
1089 
1090 #define amd64_sse_psrad_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SAR, (reg), 0x66, 0x0f, 0x72, (imm))
1091 
1092 #define amd64_sse_psrad_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe2)
1093 
1094 
1095 #define amd64_sse_pslld_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHL, (reg), 0x66, 0x0f, 0x72, (imm))
1096 
1097 #define amd64_sse_pslld_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf2)
1098 
1099 
1100 #define amd64_sse_psrlq_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHR, (reg), 0x66, 0x0f, 0x73, (imm))
1101 
1102 #define amd64_sse_psrlq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd3)
1103 
1104 
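/* Note: SSE2 has no PSRAQ. 0x66 0x0f 0x73 /4 is an invalid encoding and
 * 0x66 0x0f 0xe3 is PAVGW; a packed 64-bit arithmetic right shift only
 * exists as AVX-512 VPSRAQ. */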
1105 #define amd64_sse_psraq_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SAR, (reg), 0x66, 0x0f, 0x73, (imm))
1106 
1107 #define amd64_sse_psraq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe3)
1108 
1109 
1110 #define amd64_sse_psllq_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHL, (reg), 0x66, 0x0f, 0x73, (imm))
1111 
1112 #define amd64_sse_psllq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf3)
1113 
1114 
1115 #define amd64_sse_cvtdq2pd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xF3, 0x0F, 0xE6)
1116 
1117 #define amd64_sse_cvtdq2ps_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0F, 0x5B)
1118 
1119 #define amd64_sse_cvtpd2dq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xF2, 0x0F, 0xE6)
1120 
1121 #define amd64_sse_cvtpd2ps_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0F, 0x5A)
1122 
1123 #define amd64_sse_cvtps2dq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0F, 0x5B)
1124 
1125 #define amd64_sse_cvtps2pd_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0F, 0x5A)
1126 
1127 #define amd64_sse_cvttpd2dq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0F, 0xE6)
1128 
1129 #define amd64_sse_cvttps2dq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xF3, 0x0F, 0x5B)
1130 
1131 
1132 #define amd64_movd_xreg_reg_size(inst,dreg,sreg,size) emit_sse_reg_reg_size((inst), (dreg), (sreg), 0x66, 0x0f, 0x6e, (size))
1133 
1134 #define amd64_movd_reg_xreg_size(inst,dreg,sreg,size) emit_sse_reg_reg_size((inst), (sreg), (dreg), 0x66, 0x0f, 0x7e, (size))
1135 
1136 #define amd64_movd_xreg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase((inst), (dreg), (basereg), (disp), 0x66, 0x0f, 0x6e)
1137 
1138 
1139 #define amd64_movlhps_reg_reg(inst,dreg,sreg) emit_sse_reg_reg_op2((inst), (dreg), (sreg), 0x0f, 0x16)
1140 
1141 #define amd64_movhlps_reg_reg(inst,dreg,sreg) emit_sse_reg_reg_op2((inst), (dreg), (sreg), 0x0f, 0x12)
1142 
1143 
1144 #define amd64_sse_movups_membase_reg(inst, basereg, disp, reg) emit_sse_membase_reg_op2((inst), (basereg), (disp), (reg), 0x0f, 0x11)
1145 
1146 #define amd64_sse_movups_reg_membase(inst, dreg, basereg, disp) emit_sse_reg_membase_op2((inst), (dreg), (basereg), (disp), 0x0f, 0x10)
1147 
1148 #define amd64_sse_movaps_membase_reg(inst, basereg, disp, reg) emit_sse_membase_reg_op2((inst), (basereg), (disp), (reg), 0x0f, 0x29)
1149 
1150 #define amd64_sse_movaps_reg_membase(inst, dreg, basereg, disp) emit_sse_reg_membase_op2((inst), (dreg), (basereg), (disp), 0x0f, 0x28)
1151 
1152 #define amd64_sse_movaps_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x28)
1153 
1154 #define amd64_sse_movntps_reg_membase(inst, dreg, basereg, disp) emit_sse_reg_membase_op2((inst), (dreg), (basereg), (disp), 0x0f, 0x2b)
1155 
1156 #define amd64_sse_prefetch_reg_membase(inst, arg, basereg, disp) emit_sse_reg_membase_op2((inst), (arg), (basereg), (disp), 0x0f, 0x18)
1157 
1158 /* Generated from x86-codegen.h */
1159 
1160 #define amd64_breakpoint_size(inst,size) do { x86_breakpoint(inst); } while (0)
1161 #define amd64_cld_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_cld(inst); amd64_codegen_post(inst); } while (0)
1162 #define amd64_stosb_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_stosb(inst); amd64_codegen_post(inst); } while (0)
1163 #define amd64_stosl_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_stosl(inst); amd64_codegen_post(inst); } while (0)
1164 #define amd64_stosd_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_stosd(inst); amd64_codegen_post(inst); } while (0)
1165 #define amd64_movsb_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_movsb(inst); amd64_codegen_post(inst); } while (0)
1166 #define amd64_movsl_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_movsl(inst); amd64_codegen_post(inst); } while (0)
1167 #define amd64_movsd_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_movsd(inst); amd64_codegen_post(inst); } while (0)
1168 #define amd64_prefix_size(inst,p,size) do { x86_prefix((inst), p); } while (0)
1169 #define amd64_rdtsc_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_rdtsc(inst); amd64_codegen_post(inst); } while (0)
1170 #define amd64_cmpxchg_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_cmpxchg_reg_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
1171 #define amd64_cmpxchg_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_cmpxchg_mem_reg((inst),(mem),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
1172 #define amd64_cmpxchg_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_cmpxchg_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
1173 #define amd64_xchg_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_xchg_reg_reg((inst),((dreg)&0x7),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
1174 #define amd64_xchg_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_xchg_mem_reg((inst),(mem),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
1175 #define amd64_xchg_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_xchg_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_inc_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_inc_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_inc_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_inc_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
//#define amd64_inc_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_inc_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_dec_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_dec_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_dec_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_dec_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
//#define amd64_dec_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_dec_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_not_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_not_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_not_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_not_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_not_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_not_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_neg_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_neg_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_neg_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_neg_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_neg_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_neg_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_nop_size(inst,size) do { amd64_codegen_pre(inst); x86_nop(inst); amd64_codegen_post(inst); } while (0)
//#define amd64_alu_reg_imm_size(inst,opc,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_alu_reg_imm((inst),(opc),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_mem_imm_size(inst,opc,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_alu_mem_imm((inst),(opc),(mem),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_membase_imm_size(inst,opc,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_alu_membase_imm((inst),(opc),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_membase8_imm_size(inst,opc,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_alu_membase8_imm((inst),(opc),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_mem_reg_size(inst,opc,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_alu_mem_reg((inst),(opc),(mem),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_membase_reg_size(inst,opc,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_alu_membase_reg((inst),(opc),((basereg)&0x7),(disp),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
//#define amd64_alu_reg_reg_size(inst,opc,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_alu_reg_reg((inst),(opc),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_reg8_reg8_size(inst,opc,dreg,reg,is_dreg_h,is_reg_h,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_alu_reg8_reg8((inst),(opc),((dreg)&0x7),((reg)&0x7),(is_dreg_h),(is_reg_h)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_reg_mem_size(inst,opc,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_alu_reg_mem((inst),(opc),((reg)&0x7),(mem)); amd64_codegen_post(inst); } while (0)
//#define amd64_alu_reg_membase_size(inst,opc,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_alu_reg_membase((inst),(opc),((reg)&0x7),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
//#define amd64_test_reg_imm_size(inst,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_test_reg_imm((inst),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_test_mem_imm_size(inst,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_test_mem_imm((inst),(mem),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_test_membase_imm_size(inst,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_test_membase_imm((inst),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_test_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_test_reg_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_test_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_test_mem_reg((inst),(mem),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_test_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_test_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
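/* The shift macros that take no immediate use the shift count in CL. */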
#define amd64_shift_reg_imm_size(inst,opc,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_shift_reg_imm((inst),(opc),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_mem_imm_size(inst,opc,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_shift_mem_imm((inst),(opc),(mem),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_membase_imm_size(inst,opc,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_shift_membase_imm((inst),(opc),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_reg_size(inst,opc,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_shift_reg((inst),(opc),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_mem_size(inst,opc,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_shift_mem((inst),(opc),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_membase_size(inst,opc,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_shift_membase((inst),(opc),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_shrd_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shrd_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_shrd_reg_imm_size(inst,dreg,reg,shamt,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shrd_reg_imm((inst),((dreg)&0x7),((reg)&0x7),(shamt)); amd64_codegen_post(inst); } while (0)
#define amd64_shld_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shld_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_shld_reg_imm_size(inst,dreg,reg,shamt,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shld_reg_imm((inst),((dreg)&0x7),((reg)&0x7),(shamt)); amd64_codegen_post(inst); } while (0)
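/* The one-operand mul/div forms operate on RDX:RAX implicitly; is_signed
 * selects IMUL/IDIV rather than MUL/DIV. */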
#define amd64_mul_reg_size(inst,reg,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mul_reg((inst),((reg)&0x7),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_mul_mem_size(inst,mem,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_mul_mem((inst),(mem),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_mul_membase_size(inst,basereg,disp,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_mul_membase((inst),((basereg)&0x7),(disp),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_imul_reg_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_mem_size(inst,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_imul_reg_mem((inst),((reg)&0x7),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_membase_size(inst,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_imul_reg_membase((inst),((reg)&0x7),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_reg_imm_size(inst,dreg,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_imul_reg_reg_imm((inst),((dreg)&0x7),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_mem_imm_size(inst,reg,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_imul_reg_mem_imm((inst),((reg)&0x7),(mem),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_membase_imm_size(inst,reg,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_imul_reg_membase_imm((inst),((reg)&0x7),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_div_reg_size(inst,reg,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_div_reg((inst),((reg)&0x7),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_div_mem_size(inst,mem,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_div_mem((inst),(mem),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_div_membase_size(inst,basereg,disp,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_div_membase((inst),((basereg)&0x7),(disp),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mov_mem_reg((inst),(mem),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_regp_reg_size(inst,regp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(regp),0,(reg)); x86_mov_regp_reg((inst),(regp),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_mov_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_memindex_reg_size(inst,basereg,disp,indexreg,shift,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); x86_mov_memindex_reg((inst),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_mov_reg_reg((inst),((dreg)&0x7),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_reg_mem_size(inst,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mov_reg_mem((inst),((reg)&0x7),(mem),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_reg_membase_size(inst,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_mov_reg_membase((inst),((reg)&0x7),((basereg)&0x7),(disp),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_reg_memindex_size(inst,reg,basereg,disp,indexreg,shift,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); x86_mov_reg_memindex((inst),((reg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_clear_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(reg)); x86_clear_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_reg_imm_size(inst,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mov_reg_imm((inst),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_mem_imm_size(inst,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_mov_mem_imm((inst),(mem),(imm),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_membase_imm_size(inst,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_mov_membase_imm((inst),((basereg)&0x7),(disp),(imm),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_memindex_imm_size(inst,basereg,disp,indexreg,shift,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,(indexreg),(basereg)); x86_mov_memindex_imm((inst),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(imm),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_lea_mem_size(inst,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_lea_mem((inst),((reg)&0x7),(mem)); amd64_codegen_post(inst); } while (0)
//#define amd64_lea_membase_size(inst,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_lea_membase((inst),((reg)&0x7),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_lea_memindex_size(inst,reg,basereg,disp,indexreg,shift,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); x86_lea_memindex((inst),((reg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift)); amd64_codegen_post(inst); } while (0)
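/* The widen macros emit movzx/movsx (opcodes 0F B6/B7 and 0F BE/BF). */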
#define amd64_widen_reg_size(inst,dreg,reg,is_signed,is_half,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_widen_reg((inst),((dreg)&0x7),((reg)&0x7),(is_signed),(is_half)); amd64_codegen_post(inst); } while (0)
#define amd64_widen_mem_size(inst,dreg,mem,is_signed,is_half,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,0); x86_widen_mem((inst),((dreg)&0x7),(mem),(is_signed),(is_half)); amd64_codegen_post(inst); } while (0)
#define amd64_widen_membase_size(inst,dreg,basereg,disp,is_signed,is_half,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(basereg)); x86_widen_membase((inst),((dreg)&0x7),((basereg)&0x7),(disp),(is_signed),(is_half)); amd64_codegen_post(inst); } while (0)
#define amd64_widen_memindex_size(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),(indexreg),(basereg)); x86_widen_memindex((inst),((dreg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(is_signed),(is_half)); amd64_codegen_post(inst); } while (0)
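/* With size 8 the REX.W prefix turns CDQ into CQO, sign-extending RAX into
 * RDX:RAX. */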
#define amd64_cdq_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_cdq(inst); amd64_codegen_post(inst); } while (0)
#define amd64_wait_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_wait(inst); amd64_codegen_post(inst); } while (0)
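/* The x87 macros below mostly pass width 0 to amd64_emit_rex: x87 encodings
 * have no 64-bit operand-size prefix, so a REX byte is only needed to extend
 * the base register of a membase form. */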
#define amd64_fp_op_mem_size(inst,opc,mem,is_double,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fp_op_mem((inst),(opc),(mem),(is_double)); amd64_codegen_post(inst); } while (0)
#define amd64_fp_op_membase_size(inst,opc,basereg,disp,is_double,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fp_op_membase((inst),(opc),((basereg)&0x7),(disp),(is_double)); amd64_codegen_post(inst); } while (0)
#define amd64_fp_op_size(inst,opc,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fp_op((inst),(opc),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fp_op_reg_size(inst,opc,index,pop_stack,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fp_op_reg((inst),(opc),(index),(pop_stack)); amd64_codegen_post(inst); } while (0)
#define amd64_fp_int_op_membase_size(inst,opc,basereg,disp,is_int,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fp_int_op_membase((inst),(opc),((basereg)&0x7),(disp),(is_int)); amd64_codegen_post(inst); } while (0)
#define amd64_fstp_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fstp((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fcompp_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fcompp(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fucompp_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fucompp(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fnstsw_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fnstsw(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fnstcw_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fnstcw((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_fnstcw_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_fnstcw_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_fldcw_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fldcw((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_fldcw_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fldcw_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_fchs_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fchs(inst); amd64_codegen_post(inst); } while (0)
#define amd64_frem_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_frem(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fxch_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fxch((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fcomi_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fcomi((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fcomip_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fcomip((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fucomi_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fucomi((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fucomip_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fucomip((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fld_size(inst,mem,is_double,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fld((inst),(mem),(is_double)); amd64_codegen_post(inst); } while (0)
//#define amd64_fld_membase_size(inst,basereg,disp,is_double,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fld_membase((inst),((basereg)&0x7),(disp),(is_double)); amd64_codegen_post(inst); } while (0)
#define amd64_fld80_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fld80_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_fld80_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_fld80_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_fild_size(inst,mem,is_long,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fild((inst),(mem),(is_long)); amd64_codegen_post(inst); } while (0)
#define amd64_fild_membase_size(inst,basereg,disp,is_long,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fild_membase((inst),((basereg)&0x7),(disp),(is_long)); amd64_codegen_post(inst); } while (0)
#define amd64_fld_reg_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fld_reg((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fldz_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fldz(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fld1_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fld1(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fldpi_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fldpi(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fst_size(inst,mem,is_double,pop_stack,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fst((inst),(mem),(is_double),(pop_stack)); amd64_codegen_post(inst); } while (0)
#define amd64_fst_membase_size(inst,basereg,disp,is_double,pop_stack,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fst_membase((inst),((basereg)&0x7),(disp),(is_double),(pop_stack)); amd64_codegen_post(inst); } while (0)
#define amd64_fst80_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fst80_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_fst80_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fst80_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_fist_pop_size(inst,mem,is_long,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fist_pop((inst),(mem),(is_long)); amd64_codegen_post(inst); } while (0)
#define amd64_fist_pop_membase_size(inst,basereg,disp,is_long,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fist_pop_membase((inst),((basereg)&0x7),(disp),(is_long)); amd64_codegen_post(inst); } while (0)
#define amd64_fstsw_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_fstsw(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fist_membase_size(inst,basereg,disp,is_int,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fist_membase((inst),((basereg)&0x7),(disp),(is_int)); amd64_codegen_post(inst); } while (0)
//#define amd64_push_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_push_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_push_regp_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_push_regp((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_push_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_push_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
//#define amd64_push_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_push_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_push_memindex_size(inst,basereg,disp,indexreg,shift,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,(indexreg),(basereg)); x86_push_memindex((inst),((basereg)&0x7),(disp),((indexreg)&0x7),(shift)); amd64_codegen_post(inst); } while (0)
#define amd64_push_imm_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_push_imm((inst),(imm)); amd64_codegen_post(inst); } while (0)
//#define amd64_pop_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_pop_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_pop_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_pop_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_pop_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_pop_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
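/* Note: pushad/popad (opcodes 60/61) are invalid in 64-bit mode; the
 * pushad/popad wrappers below are x86 leftovers. */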
#define amd64_pushad_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_pushad(inst); amd64_codegen_post(inst); } while (0)
#define amd64_pushfd_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_pushfd(inst); amd64_codegen_post(inst); } while (0)
#define amd64_popad_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_popad(inst); amd64_codegen_post(inst); } while (0)
#define amd64_popfd_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_popfd(inst); amd64_codegen_post(inst); } while (0)
#define amd64_loop_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_loop((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_loope_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_loope((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_loopne_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_loopne((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_jump32_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_jump32((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_jump8_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_jump8((inst),(imm)); amd64_codegen_post(inst); } while (0)
/* These two omit amd64_codegen_pre/post so that they can be used inside other macros (historically they were defined earlier, for Native Client) */
#define amd64_jump_reg_size(inst,reg,size) do { amd64_emit_rex ((inst),0,0,0,(reg)); x86_jump_reg((inst),((reg)&0x7)); } while (0)
#define amd64_jump_mem_size(inst,mem,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_jump_mem((inst),(mem)); } while (0)
#define amd64_jump_disp_size(inst,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_jump_disp((inst),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_branch8_size(inst,cond,imm,is_signed,size) do { x86_branch8((inst),(cond),(imm),(is_signed)); } while (0)
#define amd64_branch32_size(inst,cond,imm,is_signed,size) do { x86_branch32((inst),(cond),(imm),(is_signed)); } while (0)
#define amd64_branch_size_body(inst,cond,target,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_branch((inst),(cond),(target),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_branch_size(inst,cond,target,is_signed,size) do { amd64_branch_size_body((inst),(cond),(target),(is_signed),(size)); } while (0)

#define amd64_branch_disp_size(inst,cond,disp,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_branch_disp((inst),(cond),(disp),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_set_reg_size(inst,cond,reg,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex((inst),1,0,0,(reg)); x86_set_reg((inst),(cond),((reg)&0x7),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_set_mem_size(inst,cond,mem,is_signed,size) do { amd64_codegen_pre(inst); x86_set_mem((inst),(cond),(mem),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_set_membase_size(inst,cond,basereg,disp,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_set_membase((inst),(cond),((basereg)&0x7),(disp),(is_signed)); amd64_codegen_post(inst); } while (0)
//#define amd64_call_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_call_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_call_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_call_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)

#define amd64_call_imm_size(inst,disp,size) do { x86_call_imm((inst),(disp)); } while (0)
#define amd64_call_code_size(inst,target,size) do { x86_call_code((inst),(target)); } while (0)

//#define amd64_ret_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_ret(inst); amd64_codegen_post(inst); } while (0)
#define amd64_ret_imm_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_ret_imm((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_cmov_reg_size(inst,cond,is_signed,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_cmov_reg((inst),(cond),(is_signed),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_cmov_mem_size(inst,cond,is_signed,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,0); x86_cmov_mem((inst),(cond),(is_signed),((reg)&0x7),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_cmov_membase_size(inst,cond,is_signed,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_cmov_membase((inst),(cond),(is_signed),((reg)&0x7),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_enter_size(inst,framesize,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_enter((inst),(framesize)); amd64_codegen_post(inst); } while (0)
//#define amd64_leave_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_leave(inst); amd64_codegen_post(inst); } while (0)
#define amd64_sahf_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_sahf(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fsin_size(inst,size) do { amd64_codegen_pre(inst); x86_fsin(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fcos_size(inst,size) do { amd64_codegen_pre(inst); x86_fcos(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fabs_size(inst,size) do { amd64_codegen_pre(inst); x86_fabs(inst); amd64_codegen_post(inst); } while (0)
#define amd64_ftst_size(inst,size) do { amd64_codegen_pre(inst); x86_ftst(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fxam_size(inst,size) do { amd64_codegen_pre(inst); x86_fxam(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fpatan_size(inst,size) do { amd64_codegen_pre(inst); x86_fpatan(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fprem_size(inst,size) do { amd64_codegen_pre(inst); x86_fprem(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fprem1_size(inst,size) do { amd64_codegen_pre(inst); x86_fprem1(inst); amd64_codegen_post(inst); } while (0)
#define amd64_frndint_size(inst,size) do { amd64_codegen_pre(inst); x86_frndint(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fsqrt_size(inst,size) do { amd64_codegen_pre(inst); x86_fsqrt(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fptan_size(inst,size) do { amd64_codegen_pre(inst); x86_fptan(inst); amd64_codegen_post(inst); } while (0)
//#define amd64_padding_size(inst,size) do { amd64_codegen_pre(inst); x86_padding((inst),(size)); amd64_codegen_post(inst); } while (0)
#define amd64_prolog_size(inst,frame_size,reg_mask,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_prolog((inst),(frame_size),(reg_mask)); amd64_codegen_post(inst); } while (0)
#define amd64_epilog_size(inst,reg_mask,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_epilog((inst),(reg_mask)); amd64_codegen_post(inst); } while (0)
#define amd64_xadd_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_xadd_reg_reg ((inst), ((dreg)&0x7), ((reg)&0x7), (size)); amd64_codegen_post(inst); } while (0)
#define amd64_xadd_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_xadd_mem_reg((inst),(mem),((reg)&0x7), (size)); amd64_codegen_post(inst); } while (0)
#define amd64_xadd_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_xadd_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7),(size)); amd64_codegen_post(inst); } while (0)
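
/*
 * Usage sketch (illustrative only; assumes the x86-codegen.h convention that
 * every macro writes raw bytes through the guint8* cursor passed as `inst`
 * and advances it):
 *
 *   guint8 buf [32];
 *   guint8 *code = buf;
 *
 *   amd64_test_reg_reg (code, AMD64_RAX, AMD64_RAX);    // 48 85 C0
 *   amd64_neg_reg (code, AMD64_RDX);                    // 48 F7 DA
 *   amd64_xchg_reg_reg (code, AMD64_RCX, AMD64_R8, 8);  // 49 87 C8
 *
 *   // code - buf now holds the number of bytes emitted.
 */
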
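/* Convenience wrappers around the *_size macros above; unless the size is
 * passed through explicitly, they default to an 8-byte (64-bit) operand. */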
#define amd64_breakpoint(inst) amd64_breakpoint_size(inst,8)
#define amd64_cld(inst) amd64_cld_size(inst,8)
#define amd64_stosb(inst) amd64_stosb_size(inst,8)
#define amd64_stosl(inst) amd64_stosl_size(inst,8)
#define amd64_stosd(inst) amd64_stosd_size(inst,8)
#define amd64_movsb(inst) amd64_movsb_size(inst,8)
#define amd64_movsl(inst) amd64_movsl_size(inst,8)
#define amd64_movsd(inst) amd64_movsd_size(inst,8)
#define amd64_prefix(inst,p) amd64_prefix_size(inst,p,8)
#define amd64_rdtsc(inst) amd64_rdtsc_size(inst,8)
#define amd64_cmpxchg_reg_reg(inst,dreg,reg) amd64_cmpxchg_reg_reg_size(inst,dreg,reg,8)
#define amd64_cmpxchg_mem_reg(inst,mem,reg) amd64_cmpxchg_mem_reg_size(inst,mem,reg,8)
#define amd64_cmpxchg_membase_reg(inst,basereg,disp,reg) amd64_cmpxchg_membase_reg_size(inst,basereg,disp,reg,8)
#define amd64_xchg_reg_reg(inst,dreg,reg,size) amd64_xchg_reg_reg_size(inst,dreg,reg,size)
#define amd64_xchg_mem_reg(inst,mem,reg,size) amd64_xchg_mem_reg_size(inst,mem,reg,size)
#define amd64_xchg_membase_reg(inst,basereg,disp,reg,size) amd64_xchg_membase_reg_size(inst,basereg,disp,reg,size)
#define amd64_xadd_reg_reg(inst,dreg,reg,size) amd64_xadd_reg_reg_size(inst,dreg,reg,size)
#define amd64_xadd_mem_reg(inst,mem,reg,size) amd64_xadd_mem_reg_size(inst,mem,reg,size)
#define amd64_xadd_membase_reg(inst,basereg,disp,reg,size) amd64_xadd_membase_reg_size(inst,basereg,disp,reg,size)
#define amd64_inc_mem(inst,mem) amd64_inc_mem_size(inst,mem,8)
#define amd64_inc_membase(inst,basereg,disp) amd64_inc_membase_size(inst,basereg,disp,8)
#define amd64_inc_reg(inst,reg) amd64_inc_reg_size(inst,reg,8)
#define amd64_dec_mem(inst,mem) amd64_dec_mem_size(inst,mem,8)
#define amd64_dec_membase(inst,basereg,disp) amd64_dec_membase_size(inst,basereg,disp,8)
#define amd64_dec_reg(inst,reg) amd64_dec_reg_size(inst,reg,8)
#define amd64_not_mem(inst,mem) amd64_not_mem_size(inst,mem,8)
#define amd64_not_membase(inst,basereg,disp) amd64_not_membase_size(inst,basereg,disp,8)
#define amd64_not_reg(inst,reg) amd64_not_reg_size(inst,reg,8)
#define amd64_neg_mem(inst,mem) amd64_neg_mem_size(inst,mem,8)
#define amd64_neg_membase(inst,basereg,disp) amd64_neg_membase_size(inst,basereg,disp,8)
#define amd64_neg_reg(inst,reg) amd64_neg_reg_size(inst,reg,8)
#define amd64_nop(inst) amd64_nop_size(inst,8)
//#define amd64_alu_reg_imm(inst,opc,reg,imm) amd64_alu_reg_imm_size(inst,opc,reg,imm,8)
#define amd64_alu_mem_imm(inst,opc,mem,imm) amd64_alu_mem_imm_size(inst,opc,mem,imm,8)
#define amd64_alu_membase_imm(inst,opc,basereg,disp,imm) amd64_alu_membase_imm_size(inst,opc,basereg,disp,imm,8)
#define amd64_alu_mem_reg(inst,opc,mem,reg) amd64_alu_mem_reg_size(inst,opc,mem,reg,8)
#define amd64_alu_membase_reg(inst,opc,basereg,disp,reg) amd64_alu_membase_reg_size(inst,opc,basereg,disp,reg,8)
//#define amd64_alu_reg_reg(inst,opc,dreg,reg) amd64_alu_reg_reg_size(inst,opc,dreg,reg,8)
#define amd64_alu_reg8_reg8(inst,opc,dreg,reg,is_dreg_h,is_reg_h) amd64_alu_reg8_reg8_size(inst,opc,dreg,reg,is_dreg_h,is_reg_h,8)
#define amd64_alu_reg_mem(inst,opc,reg,mem) amd64_alu_reg_mem_size(inst,opc,reg,mem,8)
#define amd64_alu_reg_membase(inst,opc,reg,basereg,disp) amd64_alu_reg_membase_size(inst,opc,reg,basereg,disp,8)
//#define amd64_test_reg_imm(inst,reg,imm) amd64_test_reg_imm_size(inst,reg,imm,8)
#define amd64_test_mem_imm(inst,mem,imm) amd64_test_mem_imm_size(inst,mem,imm,8)
#define amd64_test_membase_imm(inst,basereg,disp,imm) amd64_test_membase_imm_size(inst,basereg,disp,imm,8)
#define amd64_test_reg_reg(inst,dreg,reg) amd64_test_reg_reg_size(inst,dreg,reg,8)
#define amd64_test_mem_reg(inst,mem,reg) amd64_test_mem_reg_size(inst,mem,reg,8)
#define amd64_test_membase_reg(inst,basereg,disp,reg) amd64_test_membase_reg_size(inst,basereg,disp,reg,8)
#define amd64_shift_reg_imm(inst,opc,reg,imm) amd64_shift_reg_imm_size(inst,opc,reg,imm,8)
#define amd64_shift_mem_imm(inst,opc,mem,imm) amd64_shift_mem_imm_size(inst,opc,mem,imm,8)
#define amd64_shift_membase_imm(inst,opc,basereg,disp,imm) amd64_shift_membase_imm_size(inst,opc,basereg,disp,imm,8)
#define amd64_shift_reg(inst,opc,reg) amd64_shift_reg_size(inst,opc,reg,8)
#define amd64_shift_mem(inst,opc,mem) amd64_shift_mem_size(inst,opc,mem,8)
#define amd64_shift_membase(inst,opc,basereg,disp) amd64_shift_membase_size(inst,opc,basereg,disp,8)
#define amd64_shrd_reg(inst,dreg,reg) amd64_shrd_reg_size(inst,dreg,reg,8)
#define amd64_shrd_reg_imm(inst,dreg,reg,shamt) amd64_shrd_reg_imm_size(inst,dreg,reg,shamt,8)
#define amd64_shld_reg(inst,dreg,reg) amd64_shld_reg_size(inst,dreg,reg,8)
#define amd64_shld_reg_imm(inst,dreg,reg,shamt) amd64_shld_reg_imm_size(inst,dreg,reg,shamt,8)
#define amd64_mul_reg(inst,reg,is_signed) amd64_mul_reg_size(inst,reg,is_signed,8)
#define amd64_mul_mem(inst,mem,is_signed) amd64_mul_mem_size(inst,mem,is_signed,8)
#define amd64_mul_membase(inst,basereg,disp,is_signed) amd64_mul_membase_size(inst,basereg,disp,is_signed,8)
#define amd64_imul_reg_reg(inst,dreg,reg) amd64_imul_reg_reg_size(inst,dreg,reg,8)
#define amd64_imul_reg_mem(inst,reg,mem) amd64_imul_reg_mem_size(inst,reg,mem,8)
#define amd64_imul_reg_membase(inst,reg,basereg,disp) amd64_imul_reg_membase_size(inst,reg,basereg,disp,8)
#define amd64_imul_reg_reg_imm(inst,dreg,reg,imm) amd64_imul_reg_reg_imm_size(inst,dreg,reg,imm,8)
#define amd64_imul_reg_mem_imm(inst,reg,mem,imm) amd64_imul_reg_mem_imm_size(inst,reg,mem,imm,8)
#define amd64_imul_reg_membase_imm(inst,reg,basereg,disp,imm) amd64_imul_reg_membase_imm_size(inst,reg,basereg,disp,imm,8)
#define amd64_div_reg(inst,reg,is_signed) amd64_div_reg_size(inst,reg,is_signed,8)
#define amd64_div_mem(inst,mem,is_signed) amd64_div_mem_size(inst,mem,is_signed,8)
#define amd64_div_membase(inst,basereg,disp,is_signed) amd64_div_membase_size(inst,basereg,disp,is_signed,8)
//#define amd64_mov_mem_reg(inst,mem,reg,size) amd64_mov_mem_reg_size(inst,mem,reg,size)
//#define amd64_mov_regp_reg(inst,regp,reg,size) amd64_mov_regp_reg_size(inst,regp,reg,size)
//#define amd64_mov_membase_reg(inst,basereg,disp,reg,size) amd64_mov_membase_reg_size(inst,basereg,disp,reg,size)
#define amd64_mov_memindex_reg(inst,basereg,disp,indexreg,shift,reg,size) amd64_mov_memindex_reg_size(inst,basereg,disp,indexreg,shift,reg,size)
//#define amd64_mov_reg_reg(inst,dreg,reg,size) amd64_mov_reg_reg_size(inst,dreg,reg,size)
//#define amd64_mov_reg_mem(inst,reg,mem,size) amd64_mov_reg_mem_size(inst,reg,mem,size)
//#define amd64_mov_reg_membase(inst,reg,basereg,disp,size) amd64_mov_reg_membase_size(inst,reg,basereg,disp,size)
#define amd64_mov_reg_memindex(inst,reg,basereg,disp,indexreg,shift,size) amd64_mov_reg_memindex_size(inst,reg,basereg,disp,indexreg,shift,size)
#define amd64_clear_reg(inst,reg) amd64_clear_reg_size(inst,reg,8)
//#define amd64_mov_reg_imm(inst,reg,imm) amd64_mov_reg_imm_size(inst,reg,imm,8)
#define amd64_mov_mem_imm(inst,mem,imm,size) amd64_mov_mem_imm_size(inst,mem,imm,size)
//#define amd64_mov_membase_imm(inst,basereg,disp,imm,size) amd64_mov_membase_imm_size(inst,basereg,disp,imm,size)
#define amd64_mov_memindex_imm(inst,basereg,disp,indexreg,shift,imm,size) amd64_mov_memindex_imm_size(inst,basereg,disp,indexreg,shift,imm,size)
#define amd64_lea_mem(inst,reg,mem) amd64_lea_mem_size(inst,reg,mem,8)
//#define amd64_lea_membase(inst,reg,basereg,disp) amd64_lea_membase_size(inst,reg,basereg,disp,8)
#define amd64_lea_memindex(inst,reg,basereg,disp,indexreg,shift) amd64_lea_memindex_size(inst,reg,basereg,disp,indexreg,shift,8)
#define amd64_widen_reg(inst,dreg,reg,is_signed,is_half) amd64_widen_reg_size(inst,dreg,reg,is_signed,is_half,8)
#define amd64_widen_mem(inst,dreg,mem,is_signed,is_half) amd64_widen_mem_size(inst,dreg,mem,is_signed,is_half,8)
#define amd64_widen_membase(inst,dreg,basereg,disp,is_signed,is_half) amd64_widen_membase_size(inst,dreg,basereg,disp,is_signed,is_half,8)
#define amd64_widen_memindex(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half) amd64_widen_memindex_size(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half,8)
#define amd64_cdq(inst) amd64_cdq_size(inst,8)
#define amd64_wait(inst) amd64_wait_size(inst,8)
#define amd64_fp_op_mem(inst,opc,mem,is_double) amd64_fp_op_mem_size(inst,opc,mem,is_double,8)
#define amd64_fp_op_membase(inst,opc,basereg,disp,is_double) amd64_fp_op_membase_size(inst,opc,basereg,disp,is_double,8)
#define amd64_fp_op(inst,opc,index) amd64_fp_op_size(inst,opc,index,8)
#define amd64_fp_op_reg(inst,opc,index,pop_stack) amd64_fp_op_reg_size(inst,opc,index,pop_stack,8)
#define amd64_fp_int_op_membase(inst,opc,basereg,disp,is_int) amd64_fp_int_op_membase_size(inst,opc,basereg,disp,is_int,8)
#define amd64_fstp(inst,index) amd64_fstp_size(inst,index,8)
#define amd64_fcompp(inst) amd64_fcompp_size(inst,8)
#define amd64_fucompp(inst) amd64_fucompp_size(inst,8)
#define amd64_fnstsw(inst) amd64_fnstsw_size(inst,8)
#define amd64_fnstcw(inst,mem) amd64_fnstcw_size(inst,mem,8)
#define amd64_fnstcw_membase(inst,basereg,disp) amd64_fnstcw_membase_size(inst,basereg,disp,8)
#define amd64_fldcw(inst,mem) amd64_fldcw_size(inst,mem,8)
#define amd64_fldcw_membase(inst,basereg,disp) amd64_fldcw_membase_size(inst,basereg,disp,8)
#define amd64_fchs(inst) amd64_fchs_size(inst,8)
#define amd64_frem(inst) amd64_frem_size(inst,8)
#define amd64_fxch(inst,index) amd64_fxch_size(inst,index,8)
#define amd64_fcomi(inst,index) amd64_fcomi_size(inst,index,8)
#define amd64_fcomip(inst,index) amd64_fcomip_size(inst,index,8)
#define amd64_fucomi(inst,index) amd64_fucomi_size(inst,index,8)
#define amd64_fucomip(inst,index) amd64_fucomip_size(inst,index,8)
#define amd64_fld(inst,mem,is_double) amd64_fld_size(inst,mem,is_double,8)
#define amd64_fld_membase(inst,basereg,disp,is_double) amd64_fld_membase_size(inst,basereg,disp,is_double,8)
#define amd64_fld80_mem(inst,mem) amd64_fld80_mem_size(inst,mem,8)
#define amd64_fld80_membase(inst,basereg,disp) amd64_fld80_membase_size(inst,basereg,disp,8)
#define amd64_fild(inst,mem,is_long) amd64_fild_size(inst,mem,is_long,8)
#define amd64_fild_membase(inst,basereg,disp,is_long) amd64_fild_membase_size(inst,basereg,disp,is_long,8)
#define amd64_fld_reg(inst,index) amd64_fld_reg_size(inst,index,8)
#define amd64_fldz(inst) amd64_fldz_size(inst,8)
#define amd64_fld1(inst) amd64_fld1_size(inst,8)
#define amd64_fldpi(inst) amd64_fldpi_size(inst,8)
#define amd64_fst(inst,mem,is_double,pop_stack) amd64_fst_size(inst,mem,is_double,pop_stack,8)
#define amd64_fst_membase(inst,basereg,disp,is_double,pop_stack) amd64_fst_membase_size(inst,basereg,disp,is_double,pop_stack,8)
#define amd64_fst80_mem(inst,mem) amd64_fst80_mem_size(inst,mem,8)
#define amd64_fst80_membase(inst,basereg,disp) amd64_fst80_membase_size(inst,basereg,disp,8)
#define amd64_fist_pop(inst,mem,is_long) amd64_fist_pop_size(inst,mem,is_long,8)
#define amd64_fist_pop_membase(inst,basereg,disp,is_long) amd64_fist_pop_membase_size(inst,basereg,disp,is_long,8)
#define amd64_fstsw(inst) amd64_fstsw_size(inst,8)
#define amd64_fist_membase(inst,basereg,disp,is_int) amd64_fist_membase_size(inst,basereg,disp,is_int,8)
//#define amd64_push_reg(inst,reg) amd64_push_reg_size(inst,reg,8)
#define amd64_push_regp(inst,reg) amd64_push_regp_size(inst,reg,8)
#define amd64_push_mem(inst,mem) amd64_push_mem_size(inst,mem,8)
//#define amd64_push_membase(inst,basereg,disp) amd64_push_membase_size(inst,basereg,disp,8)
#define amd64_push_memindex(inst,basereg,disp,indexreg,shift) amd64_push_memindex_size(inst,basereg,disp,indexreg,shift,8)
#define amd64_push_imm(inst,imm) amd64_push_imm_size(inst,imm,8)
//#define amd64_pop_reg(inst,reg) amd64_pop_reg_size(inst,reg,8)
#define amd64_pop_mem(inst,mem) amd64_pop_mem_size(inst,mem,8)
#define amd64_pop_membase(inst,basereg,disp) amd64_pop_membase_size(inst,basereg,disp,8)
#define amd64_pushad(inst) amd64_pushad_size(inst,8)
#define amd64_pushfd(inst) amd64_pushfd_size(inst,8)
#define amd64_popad(inst) amd64_popad_size(inst,8)
#define amd64_popfd(inst) amd64_popfd_size(inst,8)
#define amd64_loop(inst,imm) amd64_loop_size(inst,imm,8)
#define amd64_loope(inst,imm) amd64_loope_size(inst,imm,8)
#define amd64_loopne(inst,imm) amd64_loopne_size(inst,imm,8)
#define amd64_jump32(inst,imm) amd64_jump32_size(inst,imm,8)
#define amd64_jump8(inst,imm) amd64_jump8_size(inst,imm,8)
#define amd64_jump_reg(inst,reg) amd64_jump_reg_size(inst,reg,8)
#define amd64_jump_mem(inst,mem) amd64_jump_mem_size(inst,mem,8)
#define amd64_jump_membase(inst,basereg,disp) amd64_jump_membase_size(inst,basereg,disp,8)
#define amd64_jump_code(inst,target) amd64_jump_code_size(inst,target,8)
#define amd64_jump_disp(inst,disp) amd64_jump_disp_size(inst,disp,8)
#define amd64_branch8(inst,cond,imm,is_signed) amd64_branch8_size(inst,cond,imm,is_signed,8)
#define amd64_branch32(inst,cond,imm,is_signed) amd64_branch32_size(inst,cond,imm,is_signed,8)
#define amd64_branch(inst,cond,target,is_signed) amd64_branch_size(inst,cond,target,is_signed,8)
#define amd64_branch_disp(inst,cond,disp,is_signed) amd64_branch_disp_size(inst,cond,disp,is_signed,8)
#define amd64_set_reg(inst,cond,reg,is_signed) amd64_set_reg_size(inst,cond,reg,is_signed,8)
#define amd64_set_mem(inst,cond,mem,is_signed) amd64_set_mem_size(inst,cond,mem,is_signed,8)
#define amd64_set_membase(inst,cond,basereg,disp,is_signed) amd64_set_membase_size(inst,cond,basereg,disp,is_signed,8)
#define amd64_call_imm(inst,disp) amd64_call_imm_size(inst,disp,8)
//#define amd64_call_reg(inst,reg) amd64_call_reg_size(inst,reg,8)
#define amd64_call_mem(inst,mem) amd64_call_mem_size(inst,mem,8)
#define amd64_call_membase(inst,basereg,disp) amd64_call_membase_size(inst,basereg,disp,8)
#define amd64_call_code(inst,target) amd64_call_code_size(inst,target,8)
//#define amd64_ret(inst) amd64_ret_size(inst,8)
#define amd64_ret_imm(inst,imm) amd64_ret_imm_size(inst,imm,8)
#define amd64_cmov_reg(inst,cond,is_signed,dreg,reg) amd64_cmov_reg_size(inst,cond,is_signed,dreg,reg,8)
#define amd64_cmov_mem(inst,cond,is_signed,reg,mem) amd64_cmov_mem_size(inst,cond,is_signed,reg,mem,8)
#define amd64_cmov_membase(inst,cond,is_signed,reg,basereg,disp) amd64_cmov_membase_size(inst,cond,is_signed,reg,basereg,disp,8)
#define amd64_enter(inst,framesize) amd64_enter_size(inst,framesize,8)
//#define amd64_leave(inst) amd64_leave_size(inst,8)
#define amd64_sahf(inst) amd64_sahf_size(inst,8)
#define amd64_fsin(inst) amd64_fsin_size(inst,8)
#define amd64_fcos(inst) amd64_fcos_size(inst,8)
#define amd64_fabs(inst) amd64_fabs_size(inst,8)
#define amd64_ftst(inst) amd64_ftst_size(inst,8)
#define amd64_fxam(inst) amd64_fxam_size(inst,8)
#define amd64_fpatan(inst) amd64_fpatan_size(inst,8)
#define amd64_fprem(inst) amd64_fprem_size(inst,8)
#define amd64_fprem1(inst) amd64_fprem1_size(inst,8)
#define amd64_frndint(inst) amd64_frndint_size(inst,8)
#define amd64_fsqrt(inst) amd64_fsqrt_size(inst,8)
#define amd64_fptan(inst) amd64_fptan_size(inst,8)
#define amd64_padding(inst,size) amd64_padding_size(inst,size)
#define amd64_prolog(inst,frame,reg_mask) amd64_prolog_size(inst,frame,reg_mask,8)
#define amd64_epilog(inst,reg_mask) amd64_epilog_size(inst,reg_mask,8)

#endif // AMD64_H