# Check 64-bit instructions with pseudo prefixes for encoding

	.text
_start:
	{vex3} vmovaps %xmm7,%xmm2
	{vex3} {load} vmovaps %xmm7,%xmm2
	{vex3} {store} vmovaps %xmm7,%xmm2
	vmovaps %xmm7,%xmm2
	{vex} vmovaps %xmm7,%xmm2
	{vex} {load} vmovaps %xmm7,%xmm2
	{vex} {store} vmovaps %xmm7,%xmm2
	{vex3} vmovaps (%rax),%xmm2
	vmovaps (%rax),%xmm2
	{vex2} vmovaps (%rax),%xmm2
	{evex} vmovaps (%rax),%xmm2
	{disp32} vmovaps (%rax),%xmm2
	{evex} {disp8} vmovaps (%rax),%xmm2
	{evex} {disp32} vmovaps (%rax),%xmm2

	{vex} {disp8} vmovaps 128(%rax),%xmm2
	{vex} {disp32} vmovaps 128(%rax),%xmm2
	{evex} {disp8} vmovaps 128(%rax),%xmm2
	{evex} {disp32} vmovaps 128(%rax),%xmm2

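# {load}/{store} pseudo prefixes on register-to-register GPR instructions.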
	mov %rcx, %rax
	{load} mov %rcx, %rax
	{store} mov %rcx, %rax
	adc %ecx, %eax
	{load} adc %ecx, %eax
	{store} adc %ecx, %eax
	add %ecx, %eax
	{load} add %ecx, %eax
	{store} add %ecx, %eax
	and %ecx, %eax
	{load} and %ecx, %eax
	{store} and %ecx, %eax
	cmp %ecx, %eax
	{load} cmp %ecx, %eax
	{store} cmp %ecx, %eax
	or %ecx, %eax
	{load} or %ecx, %eax
	{store} or %ecx, %eax
	sbb %ecx, %eax
	{load} sbb %ecx, %eax
	{store} sbb %ecx, %eax
	sub %ecx, %eax
	{load} sub %ecx, %eax
	{store} sub %ecx, %eax
	xor %ecx, %eax
	{load} xor %ecx, %eax
	{store} xor %ecx, %eax

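# {load}/{store} with memory, absolute, segment, control/debug and mask register operands.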
	{load} mov 0x12345678, %eax
	{load} mov %eax, 0x12345678
	{store} mov 0x12345678, %eax
	{store} mov %eax, 0x12345678
	{load} mov 0x123456789abcdef0, %eax
	{load} mov %eax, 0x123456789abcdef0
	{store} mov 0x123456789abcdef0, %eax
	{store} mov %eax, 0x123456789abcdef0
	{load} movabs 0x123456789abcdef0, %eax
	{load} movabs %eax, 0x123456789abcdef0
	{store} movabs 0x123456789abcdef0, %eax
	{store} movabs %eax, 0x123456789abcdef0
	{load} mov %eax, (%rdi)
	{load} mov (%rdi), %eax
	{store} mov %eax, (%rdi)
	{store} mov (%rdi), %eax
	{load} mov %es, %edi
	{load} mov %eax, %gs
	{store} mov %es, %edi
	{store} mov %eax, %gs
	{load} mov %cr0, %rdi
	{load} mov %rax, %cr7
	{store} mov %cr0, %rdi
	{store} mov %rax, %cr7
	{load} mov %dr0, %rdi
	{load} mov %rax, %dr7
	{store} mov %dr0, %rdi
	{store} mov %rax, %dr7
	{load} kmovb %k0, %edi
	{load} kmovb %eax, %k7
	{store} kmovb %k0, %edi
	{store} kmovb %eax, %k7
	{load} kmovd %k0, %edi
	{load} kmovd %eax, %k7
	{store} kmovd %k0, %edi
	{store} kmovd %eax, %k7
	{load} kmovq %k0, %rdi
	{load} kmovq %rax, %k7
	{store} kmovq %k0, %rdi
	{store} kmovq %rax, %k7
	{load} kmovw %k0, %edi
	{load} kmovw %eax, %k7
	{store} kmovw %k0, %edi
	{store} kmovw %eax, %k7
	{load} kmovb %k0, %k7
	{store} kmovb %k0, %k7
	{load} kmovd %k0, %k7
	{store} kmovd %k0, %k7
	{load} kmovq %k0, %k7
	{store} kmovq %k0, %k7
	{load} kmovw %k0, %k7
	{store} kmovw %k0, %k7
	{load} adc %eax, (%rdi)
	{load} adc (%rdi), %eax
	{store} adc %eax, (%rdi)
	{store} adc (%rdi), %eax
	{load} add %eax, (%rdi)
	{load} add (%rdi), %eax
	{store} add %eax, (%rdi)
	{store} add (%rdi), %eax
	{load} and %eax, (%rdi)
	{load} and (%rdi), %eax
	{store} and %eax, (%rdi)
	{store} and (%rdi), %eax
	{load} cmp %eax, (%rdi)
	{load} cmp (%rdi), %eax
	{store} cmp %eax, (%rdi)
	{store} cmp (%rdi), %eax
	{load} or %eax, (%rdi)
	{load} or (%rdi), %eax
	{store} or %eax, (%rdi)
	{store} or (%rdi), %eax
	{load} sbb %eax, (%rdi)
	{load} sbb (%rdi), %eax
	{store} sbb %eax, (%rdi)
	{store} sbb (%rdi), %eax
	{load} sub %eax, (%rdi)
	{load} sub (%rdi), %eax
	{store} sub %eax, (%rdi)
	{store} sub (%rdi), %eax
	{load} xor %eax, (%rdi)
	{load} xor (%rdi), %eax
	{store} xor %eax, (%rdi)
	{store} xor (%rdi), %eax

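# {load}/{store} on x87 arithmetic instructions.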
	fadd %st, %st
	{load} fadd %st, %st
	{store} fadd %st, %st
	fdiv %st, %st
	{load} fdiv %st, %st
	{store} fdiv %st, %st
	fdivr %st, %st
	{load} fdivr %st, %st
	{store} fdivr %st, %st
	fmul %st, %st
	{load} fmul %st, %st
	{store} fmul %st, %st
	fsub %st, %st
	{load} fsub %st, %st
	{store} fsub %st, %st
	fsubr %st, %st
	{load} fsubr %st, %st
	{store} fsubr %st, %st

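# {load}/{store} on MMX register moves.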
	movq %mm0, %mm7
	{load} movq %mm0, %mm7
	{store} movq %mm0, %mm7

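# {load}/{store} on SSE/AVX/AVX-512 register-to-register moves.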
	movaps %xmm0, %xmm7
	{load} movaps %xmm0, %xmm7
	{store} movaps %xmm0, %xmm7
	movups %xmm0, %xmm7
	{load} movups %xmm0, %xmm7
	{store} movups %xmm0, %xmm7
	movss %xmm0, %xmm7
	{load} movss %xmm0, %xmm7
	{store} movss %xmm0, %xmm7
	movapd %xmm0, %xmm7
	{load} movapd %xmm0, %xmm7
	{store} movapd %xmm0, %xmm7
	movupd %xmm0, %xmm7
	{load} movupd %xmm0, %xmm7
	{store} movupd %xmm0, %xmm7
	movsd %xmm0, %xmm7
	{load} movsd %xmm0, %xmm7
	{store} movsd %xmm0, %xmm7
	movdqa %xmm0, %xmm7
	{load} movdqa %xmm0, %xmm7
	{store} movdqa %xmm0, %xmm7
	movdqu %xmm0, %xmm7
	{load} movdqu %xmm0, %xmm7
	{store} movdqu %xmm0, %xmm7
	movq %xmm0, %xmm7
	{load} movq %xmm0, %xmm7
	{store} movq %xmm0, %xmm7
	vmovaps %xmm0, %xmm7
	{load} vmovaps %xmm0, %xmm7
	{store} vmovaps %xmm0, %xmm7
	vmovaps %zmm0, %zmm7
	{load} vmovaps %zmm0, %zmm7
	{store} vmovaps %zmm0, %zmm7
	vmovaps %xmm0, %xmm7{%k7}
	{load} vmovaps %xmm0, %xmm7{%k7}
	{store} vmovaps %xmm0, %xmm7{%k7}
	vmovups %zmm0, %zmm7
	{load} vmovups %zmm0, %zmm7
	{store} vmovups %zmm0, %zmm7
	vmovups %xmm0, %xmm7
	{load} vmovups %xmm0, %xmm7
	{store} vmovups %xmm0, %xmm7
	vmovups %xmm0, %xmm7{%k7}
	{load} vmovups %xmm0, %xmm7{%k7}
	{store} vmovups %xmm0, %xmm7{%k7}
	vmovss %xmm0, %xmm1, %xmm7
	{load} vmovss %xmm0, %xmm1, %xmm7
	{store} vmovss %xmm0, %xmm1, %xmm7
	vmovss %xmm0, %xmm1, %xmm7{%k7}
	{load} vmovss %xmm0, %xmm1, %xmm7{%k7}
	{store} vmovss %xmm0, %xmm1, %xmm7{%k7}
	vmovapd %xmm0, %xmm7
	{load} vmovapd %xmm0, %xmm7
	{store} vmovapd %xmm0, %xmm7
	vmovapd %zmm0, %zmm7
	{load} vmovapd %zmm0, %zmm7
	{store} vmovapd %zmm0, %zmm7
	vmovapd %xmm0, %xmm7{%k7}
	{load} vmovapd %xmm0, %xmm7{%k7}
	{store} vmovapd %xmm0, %xmm7{%k7}
	vmovupd %xmm0, %xmm7
	{load} vmovupd %xmm0, %xmm7
	{store} vmovupd %xmm0, %xmm7
	vmovupd %zmm0, %zmm7
	{load} vmovupd %zmm0, %zmm7
	{store} vmovupd %zmm0, %zmm7
	vmovupd %xmm0, %xmm7{%k7}
	{load} vmovupd %xmm0, %xmm7{%k7}
	{store} vmovupd %xmm0, %xmm7{%k7}
	vmovsd %xmm0, %xmm1, %xmm7
	{load} vmovsd %xmm0, %xmm1, %xmm7
	{store} vmovsd %xmm0, %xmm1, %xmm7
	vmovsd %xmm0, %xmm1, %xmm7{%k7}
	{load} vmovsd %xmm0, %xmm1, %xmm7{%k7}
	{store} vmovsd %xmm0, %xmm1, %xmm7{%k7}
	vmovdqa %xmm0, %xmm7
	{load} vmovdqa %xmm0, %xmm7
	{store} vmovdqa %xmm0, %xmm7
	vmovdqa32 %zmm0, %zmm7
	{load} vmovdqa32 %zmm0, %zmm7
	{store} vmovdqa32 %zmm0, %zmm7
	vmovdqa32 %xmm0, %xmm7
	{load} vmovdqa32 %xmm0, %xmm7
	{store} vmovdqa32 %xmm0, %xmm7
	vmovdqa64 %zmm0, %zmm7
	{load} vmovdqa64 %zmm0, %zmm7
	{store} vmovdqa64 %zmm0, %zmm7
	vmovdqa64 %xmm0, %xmm7
	{load} vmovdqa64 %xmm0, %xmm7
	{store} vmovdqa64 %xmm0, %xmm7
	vmovdqu %xmm0, %xmm7
	{load} vmovdqu %xmm0, %xmm7
	{store} vmovdqu %xmm0, %xmm7
	vmovdqu8 %zmm0, %zmm7
	{load} vmovdqu8 %zmm0, %zmm7
	{store} vmovdqu8 %zmm0, %zmm7
	vmovdqu8 %xmm0, %xmm7
	{load} vmovdqu8 %xmm0, %xmm7
	{store} vmovdqu8 %xmm0, %xmm7
	vmovdqu16 %zmm0, %zmm7
	{load} vmovdqu16 %zmm0, %zmm7
	{store} vmovdqu16 %zmm0, %zmm7
	vmovdqu16 %xmm0, %xmm7
	{load} vmovdqu16 %xmm0, %xmm7
	{store} vmovdqu16 %xmm0, %xmm7
	vmovdqu32 %zmm0, %zmm7
	{load} vmovdqu32 %zmm0, %zmm7
	{store} vmovdqu32 %zmm0, %zmm7
	vmovdqu32 %xmm0, %xmm7
	{load} vmovdqu32 %xmm0, %xmm7
	{store} vmovdqu32 %xmm0, %xmm7
	vmovdqu64 %zmm0, %zmm7
	{load} vmovdqu64 %zmm0, %zmm7
	{store} vmovdqu64 %zmm0, %zmm7
	vmovdqu64 %xmm0, %xmm7
	{load} vmovdqu64 %xmm0, %xmm7
	{store} vmovdqu64 %xmm0, %xmm7
	vmovq %xmm0, %xmm7
	{load} vmovq %xmm0, %xmm7
	{store} vmovq %xmm0, %xmm7
	{evex} vmovq %xmm0, %xmm7
	{load} {evex} vmovq %xmm0, %xmm7
	{store} {evex} vmovq %xmm0, %xmm7

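# {load}/{store} on extract instructions (pextrw/vpextrw).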
	pextrw $0, %xmm0, %edi
	{load} pextrw $0, %xmm0, %edi
	{store} pextrw $0, %xmm0, %edi

	vpextrw $0, %xmm0, %edi
	{load} vpextrw $0, %xmm0, %edi
	{store} vpextrw $0, %xmm0, %edi

	{evex} vpextrw $0, %xmm0, %edi
	{load} {evex} vpextrw $0, %xmm0, %edi
	{store} {evex} vpextrw $0, %xmm0, %edi

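# {load}/{store} on MPX bndmov.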
	bndmov %bnd3, %bnd0
	{load} bndmov %bnd3, %bnd0
	{store} bndmov %bnd3, %bnd0

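# {disp8}/{disp32} displacement-size and {rex} pseudo prefixes.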
	movaps (%rax),%xmm2
	{load} movaps (%rax),%xmm2
	{store} movaps (%rax),%xmm2
	{disp8} movaps (%rax),%xmm2
	{disp32} movaps (%rax),%xmm2
	movaps -1(%rax),%xmm2
	{disp8} movaps -1(%rax),%xmm2
	{disp32} movaps -1(%rax),%xmm2
	movaps 128(%rax),%xmm2
	{disp8} movaps 128(%rax),%xmm2
	{disp32} movaps 128(%rax),%xmm2
	{rex} mov %al,%ah
	{rex} shl %cl, %eax
	{rex} movabs 1, %al
	{rex} cmp %cl, %dl
	{rex} mov $1, %bl
	{rex} crc32 %cl, %eax
	{rex} movl %eax,%ebx
	{rex} movl %eax,%r14d
	{rex} movl %eax,(%r8)
	{rex} movaps %xmm7,%xmm2
	{rex} movaps %xmm7,%xmm12
	{rex} movaps (%rcx),%xmm2
	{rex} movaps (%r8),%xmm2
	{rex} phaddw (%rcx),%mm0
	{rex} phaddw (%r8),%mm0

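# {disp8}/{disp32} with rbp/ebp/r13/r13d bases, which always need a displacement.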
	movb (%rbp),%al
	{disp8} movb (%rbp),%al
	{disp32} movb (%rbp),%al

	movb (%ebp),%al
	{disp8} movb (%ebp),%al
	{disp32} movb (%ebp),%al

	movb (%r13),%al
	{disp8} movb (%r13),%al
	{disp32} movb (%r13),%al

	movb (%r13d),%al
	{disp8} movb (%r13d),%al
	{disp32} movb (%r13d),%al

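# Repeat the tests in Intel syntax.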
	.intel_syntax noprefix
	{vex3} vmovaps xmm2,xmm7
	{vex3} {load} vmovaps xmm2,xmm7
	{vex3} {store} vmovaps xmm2,xmm7
	vmovaps xmm2,xmm7
	{vex2} vmovaps xmm2,xmm7
	{vex2} {load} vmovaps xmm2,xmm7
	{vex2} {store} vmovaps xmm2,xmm7
	{vex3} vmovaps xmm2,XMMWORD PTR [rax]
	vmovaps xmm2,XMMWORD PTR [rax]
	{vex2} vmovaps xmm2,XMMWORD PTR [rax]
	{evex} vmovaps xmm2,XMMWORD PTR [rax]
	{disp32} vmovaps xmm2,XMMWORD PTR [rax]
	{evex} {disp8} vmovaps xmm2,XMMWORD PTR [rax]
	{evex} {disp32} vmovaps xmm2,XMMWORD PTR [rax]

	{vex} {disp8} vmovaps xmm2,XMMWORD PTR [rax+128]
	{vex} {disp32} vmovaps xmm2,XMMWORD PTR [rax+128]
	{evex} {disp8} vmovaps xmm2,XMMWORD PTR [rax+128]
	{evex} {disp32} vmovaps xmm2,XMMWORD PTR [rax+128]

	mov rax,rcx
	{load} mov rax,rcx
	{store} mov rax,rcx
	movaps xmm2,XMMWORD PTR [rax]
	{load} movaps xmm2,XMMWORD PTR [rax]
	{store} movaps xmm2,XMMWORD PTR [rax]
	{disp8} movaps xmm2,XMMWORD PTR [rax]
	{disp32} movaps xmm2,XMMWORD PTR [rax]
	movaps xmm2,XMMWORD PTR [rax-1]
	{disp8} movaps xmm2,XMMWORD PTR [rax-1]
	{disp32} movaps xmm2,XMMWORD PTR [rax-1]
	movaps xmm2,XMMWORD PTR [rax+128]
	{disp8} movaps xmm2,XMMWORD PTR [rax+128]
	{disp32} movaps xmm2,XMMWORD PTR [rax+128]
	{rex} mov ah,al
	{rex} mov ebx,eax
	{rex} mov r14d,eax
	{rex} mov DWORD PTR [r8],eax
	{rex} movaps xmm2,xmm7
	{rex} movaps xmm12,xmm7
	{rex} movaps xmm2,XMMWORD PTR [rcx]
	{rex} movaps xmm2,XMMWORD PTR [r8]
	{rex} phaddw mm0,QWORD PTR [rcx]
	{rex} phaddw mm0,QWORD PTR [r8]

	mov al, BYTE PTR [rbp]
	{disp8} mov al, BYTE PTR [rbp]
	{disp32} mov al, BYTE PTR [rbp]

	mov al, BYTE PTR [ebp]
	{disp8} mov al, BYTE PTR [ebp]
	{disp32} mov al, BYTE PTR [ebp]

	mov al, BYTE PTR [r13]
	{disp8} mov al, BYTE PTR [r13]
	{disp32} mov al, BYTE PTR [r13]

	mov al, BYTE PTR [r13d]
	{disp8} mov al, BYTE PTR [r13d]
	{disp32} mov al, BYTE PTR [r13d]
