# Author: Emilia Käsper and Peter Schwabe
# Date: 2009-03-19
# +2010.01.31: minor namespace modifications
# Public domain

.data
.p2align 6

RCON: .int 0x00000000, 0x00000000, 0x00000000, 0xffffffff
ROTB: .int 0x0c000000, 0x00000000, 0x04000000, 0x08000000
EXPB0: .int 0x03030303, 0x07070707, 0x0b0b0b0b, 0x0f0f0f0f
CTRINC1: .int 0x00000001, 0x00000000, 0x00000000, 0x00000000
CTRINC2: .int 0x00000002, 0x00000000, 0x00000000, 0x00000000
CTRINC3: .int 0x00000003, 0x00000000, 0x00000000, 0x00000000
CTRINC4: .int 0x00000004, 0x00000000, 0x00000000, 0x00000000
CTRINC5: .int 0x00000005, 0x00000000, 0x00000000, 0x00000000
CTRINC6: .int 0x00000006, 0x00000000, 0x00000000, 0x00000000
CTRINC7: .int 0x00000007, 0x00000000, 0x00000000, 0x00000000
RCTRINC1: .int 0x00000000, 0x00000000, 0x00000000, 0x00000001
RCTRINC2: .int 0x00000000, 0x00000000, 0x00000000, 0x00000002
RCTRINC3: .int 0x00000000, 0x00000000, 0x00000000, 0x00000003
RCTRINC4: .int 0x00000000, 0x00000000, 0x00000000, 0x00000004
RCTRINC5: .int 0x00000000, 0x00000000, 0x00000000, 0x00000005
RCTRINC6: .int 0x00000000, 0x00000000, 0x00000000, 0x00000006
RCTRINC7: .int 0x00000000, 0x00000000, 0x00000000, 0x00000007

SWAP32: .int 0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f
M0SWAP: .quad 0x0105090d0004080c, 0x03070b0f02060a0e

BS0: .quad 0x5555555555555555, 0x5555555555555555
BS1: .quad 0x3333333333333333, 0x3333333333333333
BS2: .quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f
ONE: .quad 0xffffffffffffffff, 0xffffffffffffffff
M0:  .quad 0x02060a0e03070b0f, 0x0004080c0105090d
SRM0: .quad 0x0304090e00050a0f, 0x01060b0c0207080d
SR: .quad 0x0504070600030201, 0x0f0e0d0c0a09080b

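# The constants above drive the bitsliced implementation: SWAP32, M0, M0SWAP,
# SR and SRM0 are pshufb byte-permutation masks (SWAP32 reverses the bytes of
# each 32-bit word; SR encodes the ShiftRows permutation); BS0/BS1/BS2 are the
# 0x55../0x33../0x0f.. masks used by the swapmove bitslicing transform; and
# CTRINC*/RCTRINC* hold per-block counter increments, RCTRINC* in the
# byte-reversed dword layout used after SWAP32.
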
# qhasm: int64 outp

# qhasm: int64 len

# qhasm: int64 np

# qhasm: int64 c

# qhasm: input outp

# qhasm: input len

# qhasm: input np

# qhasm: input c

# qhasm: int64 lensav

# qhasm: int6464 xmm0

# qhasm: int6464 xmm1

# qhasm: int6464 xmm2

# qhasm: int6464 xmm3

# qhasm: int6464 xmm4

# qhasm: int6464 xmm5

# qhasm: int6464 xmm6

# qhasm: int6464 xmm7

# qhasm: int6464 xmm8

# qhasm: int6464 xmm9

# qhasm: int6464 xmm10

# qhasm: int6464 xmm11

# qhasm: int6464 xmm12

# qhasm: int6464 xmm13

# qhasm: int6464 xmm14

# qhasm: int6464 xmm15

# qhasm: int6464 t

# qhasm: stack1024 bl

# qhasm: stack128 nonce_stack

# qhasm: int64 blp

# qhasm: int64 b

# qhasm: int64 tmp

# qhasm: enter crypto_stream_aes128ctr_core2_afternm
.text
.p2align 5
.globl _crypto_stream_aes128ctr_core2_afternm
.globl crypto_stream_aes128ctr_core2_afternm
_crypto_stream_aes128ctr_core2_afternm:
crypto_stream_aes128ctr_core2_afternm:
mov %rsp,%r11
and $31,%r11
add $160,%r11
sub %r11,%rsp

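# No callee-saved state is used; the prologue above only aligns the stack:
# %r11 = (%rsp & 31) + 160, so after the subtraction %rsp is 32-byte aligned
# with 160 bytes of scratch space for the movdqa-addressed stack slots.
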
# qhasm: xmm0 = *(int128 *) (np + 0)
# asm 1: movdqa 0(<np=int64#3),>xmm0=int6464#1
# asm 2: movdqa 0(<np=%rdx),>xmm0=%xmm0
movdqa 0(%rdx),%xmm0

# qhasm: nonce_stack = xmm0
# asm 1: movdqa <xmm0=int6464#1,>nonce_stack=stack128#1
# asm 2: movdqa <xmm0=%xmm0,>nonce_stack=0(%rsp)
movdqa %xmm0,0(%rsp)

# qhasm: np = &nonce_stack
# asm 1: leaq <nonce_stack=stack128#1,>np=int64#3
# asm 2: leaq <nonce_stack=0(%rsp),>np=%rdx
leaq 0(%rsp),%rdx

# qhasm: enc_block:
._enc_block:

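# Main loop: each iteration encrypts eight consecutive counter blocks
# (128 bytes of keystream) in bitsliced form, using the precomputed
# bitsliced key schedule pointed to by c.
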
# qhasm: xmm0 = *(int128 *) (np + 0)
# asm 1: movdqa 0(<np=int64#3),>xmm0=int6464#1
# asm 2: movdqa 0(<np=%rdx),>xmm0=%xmm0
movdqa 0(%rdx),%xmm0

# qhasm: xmm1 = xmm0
# asm 1: movdqa <xmm0=int6464#1,>xmm1=int6464#2
# asm 2: movdqa <xmm0=%xmm0,>xmm1=%xmm1
movdqa %xmm0,%xmm1

# qhasm: shuffle bytes of xmm1 by SWAP32
# asm 1: pshufb SWAP32,<xmm1=int6464#2
# asm 2: pshufb SWAP32,<xmm1=%xmm1
pshufb SWAP32,%xmm1

# qhasm: xmm2 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm2=int6464#3
# asm 2: movdqa <xmm1=%xmm1,>xmm2=%xmm2
movdqa %xmm1,%xmm2

# qhasm: xmm3 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm3=int6464#4
# asm 2: movdqa <xmm1=%xmm1,>xmm3=%xmm3
movdqa %xmm1,%xmm3

# qhasm: xmm4 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm4=int6464#5
# asm 2: movdqa <xmm1=%xmm1,>xmm4=%xmm4
movdqa %xmm1,%xmm4

# qhasm: xmm5 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm5=int6464#6
# asm 2: movdqa <xmm1=%xmm1,>xmm5=%xmm5
movdqa %xmm1,%xmm5

# qhasm: xmm6 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm6=int6464#7
# asm 2: movdqa <xmm1=%xmm1,>xmm6=%xmm6
movdqa %xmm1,%xmm6

# qhasm: xmm7 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm7=int6464#8
# asm 2: movdqa <xmm1=%xmm1,>xmm7=%xmm7
movdqa %xmm1,%xmm7

# qhasm: int32323232 xmm1 += RCTRINC1
# asm 1: paddd  RCTRINC1,<xmm1=int6464#2
# asm 2: paddd  RCTRINC1,<xmm1=%xmm1
paddd  RCTRINC1,%xmm1

# qhasm: int32323232 xmm2 += RCTRINC2
# asm 1: paddd  RCTRINC2,<xmm2=int6464#3
# asm 2: paddd  RCTRINC2,<xmm2=%xmm2
paddd  RCTRINC2,%xmm2

# qhasm: int32323232 xmm3 += RCTRINC3
# asm 1: paddd  RCTRINC3,<xmm3=int6464#4
# asm 2: paddd  RCTRINC3,<xmm3=%xmm3
paddd  RCTRINC3,%xmm3

# qhasm: int32323232 xmm4 += RCTRINC4
# asm 1: paddd  RCTRINC4,<xmm4=int6464#5
# asm 2: paddd  RCTRINC4,<xmm4=%xmm4
paddd  RCTRINC4,%xmm4

# qhasm: int32323232 xmm5 += RCTRINC5
# asm 1: paddd  RCTRINC5,<xmm5=int6464#6
# asm 2: paddd  RCTRINC5,<xmm5=%xmm5
paddd  RCTRINC5,%xmm5

# qhasm: int32323232 xmm6 += RCTRINC6
# asm 1: paddd  RCTRINC6,<xmm6=int6464#7
# asm 2: paddd  RCTRINC6,<xmm6=%xmm6
paddd  RCTRINC6,%xmm6

# qhasm: int32323232 xmm7 += RCTRINC7
# asm 1: paddd  RCTRINC7,<xmm7=int6464#8
# asm 2: paddd  RCTRINC7,<xmm7=%xmm7
paddd  RCTRINC7,%xmm7

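# xmm0 holds the current counter block unchanged; xmm1..xmm7 hold copies with
# each 32-bit word byte-reversed (SWAP32) and the counter word incremented by
# 1..7 via paddd. Carries out of the low 32 counter bits are not propagated
# within this batch.
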
# qhasm: shuffle bytes of xmm0 by M0
# asm 1: pshufb M0,<xmm0=int6464#1
# asm 2: pshufb M0,<xmm0=%xmm0
pshufb M0,%xmm0

# qhasm: shuffle bytes of xmm1 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm1=int6464#2
# asm 2: pshufb M0SWAP,<xmm1=%xmm1
pshufb M0SWAP,%xmm1

# qhasm: shuffle bytes of xmm2 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm2=int6464#3
# asm 2: pshufb M0SWAP,<xmm2=%xmm2
pshufb M0SWAP,%xmm2

# qhasm: shuffle bytes of xmm3 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm3=int6464#4
# asm 2: pshufb M0SWAP,<xmm3=%xmm3
pshufb M0SWAP,%xmm3

# qhasm: shuffle bytes of xmm4 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm4=int6464#5
# asm 2: pshufb M0SWAP,<xmm4=%xmm4
pshufb M0SWAP,%xmm4

# qhasm: shuffle bytes of xmm5 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm5=int6464#6
# asm 2: pshufb M0SWAP,<xmm5=%xmm5
pshufb M0SWAP,%xmm5

# qhasm: shuffle bytes of xmm6 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm6=int6464#7
# asm 2: pshufb M0SWAP,<xmm6=%xmm6
pshufb M0SWAP,%xmm6

# qhasm: shuffle bytes of xmm7 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm7=int6464#8
# asm 2: pshufb M0SWAP,<xmm7=%xmm7
pshufb M0SWAP,%xmm7

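# M0 and M0SWAP permute the bytes of the eight blocks into the interleaved
# layout expected by the bitslicing transform below; M0SWAP composes M0 with
# the (self-inverse) SWAP32, so it also undoes the byte reversal applied to
# xmm1..xmm7 above.
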
# qhasm:     xmm8 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm8=int6464#9
# asm 2: movdqa <xmm6=%xmm6,>xmm8=%xmm8
movdqa %xmm6,%xmm8

# qhasm:     uint6464 xmm8 >>= 1
# asm 1: psrlq $1,<xmm8=int6464#9
# asm 2: psrlq $1,<xmm8=%xmm8
psrlq $1,%xmm8

# qhasm:     xmm8 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm8=int6464#9
# asm 2: pxor  <xmm7=%xmm7,<xmm8=%xmm8
pxor  %xmm7,%xmm8

# qhasm:     xmm8 &= BS0
# asm 1: pand  BS0,<xmm8=int6464#9
# asm 2: pand  BS0,<xmm8=%xmm8
pand  BS0,%xmm8

# qhasm:     xmm7 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm7=int6464#8
# asm 2: pxor  <xmm8=%xmm8,<xmm7=%xmm7
pxor  %xmm8,%xmm7

# qhasm:     uint6464 xmm8 <<= 1
# asm 1: psllq $1,<xmm8=int6464#9
# asm 2: psllq $1,<xmm8=%xmm8
psllq $1,%xmm8

# qhasm:     xmm6 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm6=int6464#7
# asm 2: pxor  <xmm8=%xmm8,<xmm6=%xmm6
pxor  %xmm8,%xmm6

# qhasm:     xmm8 = xmm4
# asm 1: movdqa <xmm4=int6464#5,>xmm8=int6464#9
# asm 2: movdqa <xmm4=%xmm4,>xmm8=%xmm8
movdqa %xmm4,%xmm8

# qhasm:     uint6464 xmm8 >>= 1
# asm 1: psrlq $1,<xmm8=int6464#9
# asm 2: psrlq $1,<xmm8=%xmm8
psrlq $1,%xmm8

# qhasm:     xmm8 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm8=int6464#9
# asm 2: pxor  <xmm5=%xmm5,<xmm8=%xmm8
pxor  %xmm5,%xmm8

# qhasm:     xmm8 &= BS0
# asm 1: pand  BS0,<xmm8=int6464#9
# asm 2: pand  BS0,<xmm8=%xmm8
pand  BS0,%xmm8

# qhasm:     xmm5 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm5=int6464#6
# asm 2: pxor  <xmm8=%xmm8,<xmm5=%xmm5
pxor  %xmm8,%xmm5

# qhasm:     uint6464 xmm8 <<= 1
# asm 1: psllq $1,<xmm8=int6464#9
# asm 2: psllq $1,<xmm8=%xmm8
psllq $1,%xmm8

# qhasm:     xmm4 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#5
# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm4
pxor  %xmm8,%xmm4

# qhasm:     xmm8 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm8=int6464#9
# asm 2: movdqa <xmm2=%xmm2,>xmm8=%xmm8
movdqa %xmm2,%xmm8

# qhasm:     uint6464 xmm8 >>= 1
# asm 1: psrlq $1,<xmm8=int6464#9
# asm 2: psrlq $1,<xmm8=%xmm8
psrlq $1,%xmm8

# qhasm:     xmm8 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm8=int6464#9
# asm 2: pxor  <xmm3=%xmm3,<xmm8=%xmm8
pxor  %xmm3,%xmm8

# qhasm:     xmm8 &= BS0
# asm 1: pand  BS0,<xmm8=int6464#9
# asm 2: pand  BS0,<xmm8=%xmm8
pand  BS0,%xmm8

# qhasm:     xmm3 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm3=int6464#4
# asm 2: pxor  <xmm8=%xmm8,<xmm3=%xmm3
pxor  %xmm8,%xmm3

# qhasm:     uint6464 xmm8 <<= 1
# asm 1: psllq $1,<xmm8=int6464#9
# asm 2: psllq $1,<xmm8=%xmm8
psllq $1,%xmm8

# qhasm:     xmm2 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm2=int6464#3
# asm 2: pxor  <xmm8=%xmm8,<xmm2=%xmm2
pxor  %xmm8,%xmm2

# qhasm:     xmm8 = xmm0
# asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9
# asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8
movdqa %xmm0,%xmm8

# qhasm:     uint6464 xmm8 >>= 1
# asm 1: psrlq $1,<xmm8=int6464#9
# asm 2: psrlq $1,<xmm8=%xmm8
psrlq $1,%xmm8

# qhasm:     xmm8 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm8=int6464#9
# asm 2: pxor  <xmm1=%xmm1,<xmm8=%xmm8
pxor  %xmm1,%xmm8

# qhasm:     xmm8 &= BS0
# asm 1: pand  BS0,<xmm8=int6464#9
# asm 2: pand  BS0,<xmm8=%xmm8
pand  BS0,%xmm8

# qhasm:     xmm1 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm1=int6464#2
# asm 2: pxor  <xmm8=%xmm8,<xmm1=%xmm1
pxor  %xmm8,%xmm1

# qhasm:     uint6464 xmm8 <<= 1
# asm 1: psllq $1,<xmm8=int6464#9
# asm 2: psllq $1,<xmm8=%xmm8
psllq $1,%xmm8

# qhasm:     xmm0 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm8=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

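# First swapmove pass complete. Each group above computes, for a register
# pair (a,b): t = ((a >> 1) ^ b) & BS0; b ^= t; a ^= t << 1, which exchanges
# the odd-position bits of a with the even-position bits of b. Two more
# passes, with shift 2/mask BS1 and shift 4/mask BS2, follow.
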
# qhasm:     xmm8 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#9
# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm8
movdqa %xmm5,%xmm8

# qhasm:     uint6464 xmm8 >>= 2
# asm 1: psrlq $2,<xmm8=int6464#9
# asm 2: psrlq $2,<xmm8=%xmm8
psrlq $2,%xmm8

# qhasm:     xmm8 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm8=int6464#9
# asm 2: pxor  <xmm7=%xmm7,<xmm8=%xmm8
pxor  %xmm7,%xmm8

# qhasm:     xmm8 &= BS1
# asm 1: pand  BS1,<xmm8=int6464#9
# asm 2: pand  BS1,<xmm8=%xmm8
pand  BS1,%xmm8

# qhasm:     xmm7 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm7=int6464#8
# asm 2: pxor  <xmm8=%xmm8,<xmm7=%xmm7
pxor  %xmm8,%xmm7

# qhasm:     uint6464 xmm8 <<= 2
# asm 1: psllq $2,<xmm8=int6464#9
# asm 2: psllq $2,<xmm8=%xmm8
psllq $2,%xmm8

# qhasm:     xmm5 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm5=int6464#6
# asm 2: pxor  <xmm8=%xmm8,<xmm5=%xmm5
pxor  %xmm8,%xmm5

# qhasm:     xmm8 = xmm4
# asm 1: movdqa <xmm4=int6464#5,>xmm8=int6464#9
# asm 2: movdqa <xmm4=%xmm4,>xmm8=%xmm8
movdqa %xmm4,%xmm8

# qhasm:     uint6464 xmm8 >>= 2
# asm 1: psrlq $2,<xmm8=int6464#9
# asm 2: psrlq $2,<xmm8=%xmm8
psrlq $2,%xmm8

# qhasm:     xmm8 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm8=int6464#9
# asm 2: pxor  <xmm6=%xmm6,<xmm8=%xmm8
pxor  %xmm6,%xmm8

# qhasm:     xmm8 &= BS1
# asm 1: pand  BS1,<xmm8=int6464#9
# asm 2: pand  BS1,<xmm8=%xmm8
pand  BS1,%xmm8

# qhasm:     xmm6 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm6=int6464#7
# asm 2: pxor  <xmm8=%xmm8,<xmm6=%xmm6
pxor  %xmm8,%xmm6

# qhasm:     uint6464 xmm8 <<= 2
# asm 1: psllq $2,<xmm8=int6464#9
# asm 2: psllq $2,<xmm8=%xmm8
psllq $2,%xmm8

# qhasm:     xmm4 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#5
# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm4
pxor  %xmm8,%xmm4

# qhasm:     xmm8 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#9
# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm8
movdqa %xmm1,%xmm8

# qhasm:     uint6464 xmm8 >>= 2
# asm 1: psrlq $2,<xmm8=int6464#9
# asm 2: psrlq $2,<xmm8=%xmm8
psrlq $2,%xmm8

# qhasm:     xmm8 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm8=int6464#9
# asm 2: pxor  <xmm3=%xmm3,<xmm8=%xmm8
pxor  %xmm3,%xmm8

# qhasm:     xmm8 &= BS1
# asm 1: pand  BS1,<xmm8=int6464#9
# asm 2: pand  BS1,<xmm8=%xmm8
pand  BS1,%xmm8

# qhasm:     xmm3 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm3=int6464#4
# asm 2: pxor  <xmm8=%xmm8,<xmm3=%xmm3
pxor  %xmm8,%xmm3

# qhasm:     uint6464 xmm8 <<= 2
# asm 1: psllq $2,<xmm8=int6464#9
# asm 2: psllq $2,<xmm8=%xmm8
psllq $2,%xmm8

# qhasm:     xmm1 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm1=int6464#2
# asm 2: pxor  <xmm8=%xmm8,<xmm1=%xmm1
pxor  %xmm8,%xmm1

# qhasm:     xmm8 = xmm0
# asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9
# asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8
movdqa %xmm0,%xmm8

# qhasm:     uint6464 xmm8 >>= 2
# asm 1: psrlq $2,<xmm8=int6464#9
# asm 2: psrlq $2,<xmm8=%xmm8
psrlq $2,%xmm8

# qhasm:     xmm8 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm8=int6464#9
# asm 2: pxor  <xmm2=%xmm2,<xmm8=%xmm8
pxor  %xmm2,%xmm8

# qhasm:     xmm8 &= BS1
# asm 1: pand  BS1,<xmm8=int6464#9
# asm 2: pand  BS1,<xmm8=%xmm8
pand  BS1,%xmm8

# qhasm:     xmm2 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm2=int6464#3
# asm 2: pxor  <xmm8=%xmm8,<xmm2=%xmm2
pxor  %xmm8,%xmm2

# qhasm:     uint6464 xmm8 <<= 2
# asm 1: psllq $2,<xmm8=int6464#9
# asm 2: psllq $2,<xmm8=%xmm8
psllq $2,%xmm8

# qhasm:     xmm0 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm8=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

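# Second swapmove pass complete (shift 2, mask BS1): two-bit groups have been
# exchanged between register pairs two apart in the slicing order.
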
# qhasm:     xmm8 = xmm3
# asm 1: movdqa <xmm3=int6464#4,>xmm8=int6464#9
# asm 2: movdqa <xmm3=%xmm3,>xmm8=%xmm8
movdqa %xmm3,%xmm8

# qhasm:     uint6464 xmm8 >>= 4
# asm 1: psrlq $4,<xmm8=int6464#9
# asm 2: psrlq $4,<xmm8=%xmm8
psrlq $4,%xmm8

# qhasm:     xmm8 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm8=int6464#9
# asm 2: pxor  <xmm7=%xmm7,<xmm8=%xmm8
pxor  %xmm7,%xmm8

# qhasm:     xmm8 &= BS2
# asm 1: pand  BS2,<xmm8=int6464#9
# asm 2: pand  BS2,<xmm8=%xmm8
pand  BS2,%xmm8

# qhasm:     xmm7 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm7=int6464#8
# asm 2: pxor  <xmm8=%xmm8,<xmm7=%xmm7
pxor  %xmm8,%xmm7

# qhasm:     uint6464 xmm8 <<= 4
# asm 1: psllq $4,<xmm8=int6464#9
# asm 2: psllq $4,<xmm8=%xmm8
psllq $4,%xmm8

# qhasm:     xmm3 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm3=int6464#4
# asm 2: pxor  <xmm8=%xmm8,<xmm3=%xmm3
pxor  %xmm8,%xmm3

# qhasm:     xmm8 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm8=int6464#9
# asm 2: movdqa <xmm2=%xmm2,>xmm8=%xmm8
movdqa %xmm2,%xmm8

# qhasm:     uint6464 xmm8 >>= 4
# asm 1: psrlq $4,<xmm8=int6464#9
# asm 2: psrlq $4,<xmm8=%xmm8
psrlq $4,%xmm8

# qhasm:     xmm8 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm8=int6464#9
# asm 2: pxor  <xmm6=%xmm6,<xmm8=%xmm8
pxor  %xmm6,%xmm8

# qhasm:     xmm8 &= BS2
# asm 1: pand  BS2,<xmm8=int6464#9
# asm 2: pand  BS2,<xmm8=%xmm8
pand  BS2,%xmm8

# qhasm:     xmm6 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm6=int6464#7
# asm 2: pxor  <xmm8=%xmm8,<xmm6=%xmm6
pxor  %xmm8,%xmm6

# qhasm:     uint6464 xmm8 <<= 4
# asm 1: psllq $4,<xmm8=int6464#9
# asm 2: psllq $4,<xmm8=%xmm8
psllq $4,%xmm8

# qhasm:     xmm2 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm2=int6464#3
# asm 2: pxor  <xmm8=%xmm8,<xmm2=%xmm2
pxor  %xmm8,%xmm2

# qhasm:     xmm8 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#9
# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm8
movdqa %xmm1,%xmm8

# qhasm:     uint6464 xmm8 >>= 4
# asm 1: psrlq $4,<xmm8=int6464#9
# asm 2: psrlq $4,<xmm8=%xmm8
psrlq $4,%xmm8

# qhasm:     xmm8 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm8=int6464#9
# asm 2: pxor  <xmm5=%xmm5,<xmm8=%xmm8
pxor  %xmm5,%xmm8

# qhasm:     xmm8 &= BS2
# asm 1: pand  BS2,<xmm8=int6464#9
# asm 2: pand  BS2,<xmm8=%xmm8
pand  BS2,%xmm8

# qhasm:     xmm5 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm5=int6464#6
# asm 2: pxor  <xmm8=%xmm8,<xmm5=%xmm5
pxor  %xmm8,%xmm5

# qhasm:     uint6464 xmm8 <<= 4
# asm 1: psllq $4,<xmm8=int6464#9
# asm 2: psllq $4,<xmm8=%xmm8
psllq $4,%xmm8

# qhasm:     xmm1 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm1=int6464#2
# asm 2: pxor  <xmm8=%xmm8,<xmm1=%xmm1
pxor  %xmm8,%xmm1

# qhasm:     xmm8 = xmm0
# asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9
# asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8
movdqa %xmm0,%xmm8

# qhasm:     uint6464 xmm8 >>= 4
# asm 1: psrlq $4,<xmm8=int6464#9
# asm 2: psrlq $4,<xmm8=%xmm8
psrlq $4,%xmm8

# qhasm:     xmm8 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm8=int6464#9
# asm 2: pxor  <xmm4=%xmm4,<xmm8=%xmm8
pxor  %xmm4,%xmm8

# qhasm:     xmm8 &= BS2
# asm 1: pand  BS2,<xmm8=int6464#9
# asm 2: pand  BS2,<xmm8=%xmm8
pand  BS2,%xmm8

# qhasm:     xmm4 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#5
# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm4
pxor  %xmm8,%xmm4

# qhasm:     uint6464 xmm8 <<= 4
# asm 1: psllq $4,<xmm8=int6464#9
# asm 2: psllq $4,<xmm8=%xmm8
psllq $4,%xmm8

# qhasm:     xmm0 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm8=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

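# Bitslicing is complete: xmm0..xmm7 now each hold one bit position of every
# byte of the eight 16-byte blocks, so the round function below operates on
# all 128 bytes in parallel with bitwise instructions.
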
# qhasm:     xmm0 ^= *(int128 *)(c + 0)
# asm 1: pxor 0(<c=int64#4),<xmm0=int6464#1
# asm 2: pxor 0(<c=%rcx),<xmm0=%xmm0
pxor 0(%rcx),%xmm0

# qhasm:     shuffle bytes of xmm0 by SR
# asm 1: pshufb SR,<xmm0=int6464#1
# asm 2: pshufb SR,<xmm0=%xmm0
pshufb SR,%xmm0

# qhasm:     xmm1 ^= *(int128 *)(c + 16)
# asm 1: pxor 16(<c=int64#4),<xmm1=int6464#2
# asm 2: pxor 16(<c=%rcx),<xmm1=%xmm1
pxor 16(%rcx),%xmm1

# qhasm:     shuffle bytes of xmm1 by SR
# asm 1: pshufb SR,<xmm1=int6464#2
# asm 2: pshufb SR,<xmm1=%xmm1
pshufb SR,%xmm1

# qhasm:     xmm2 ^= *(int128 *)(c + 32)
# asm 1: pxor 32(<c=int64#4),<xmm2=int6464#3
# asm 2: pxor 32(<c=%rcx),<xmm2=%xmm2
pxor 32(%rcx),%xmm2

# qhasm:     shuffle bytes of xmm2 by SR
# asm 1: pshufb SR,<xmm2=int6464#3
# asm 2: pshufb SR,<xmm2=%xmm2
pshufb SR,%xmm2

# qhasm:     xmm3 ^= *(int128 *)(c + 48)
# asm 1: pxor 48(<c=int64#4),<xmm3=int6464#4
# asm 2: pxor 48(<c=%rcx),<xmm3=%xmm3
pxor 48(%rcx),%xmm3

# qhasm:     shuffle bytes of xmm3 by SR
# asm 1: pshufb SR,<xmm3=int6464#4
# asm 2: pshufb SR,<xmm3=%xmm3
pshufb SR,%xmm3

# qhasm:     xmm4 ^= *(int128 *)(c + 64)
# asm 1: pxor 64(<c=int64#4),<xmm4=int6464#5
# asm 2: pxor 64(<c=%rcx),<xmm4=%xmm4
pxor 64(%rcx),%xmm4

# qhasm:     shuffle bytes of xmm4 by SR
# asm 1: pshufb SR,<xmm4=int6464#5
# asm 2: pshufb SR,<xmm4=%xmm4
pshufb SR,%xmm4

# qhasm:     xmm5 ^= *(int128 *)(c + 80)
# asm 1: pxor 80(<c=int64#4),<xmm5=int6464#6
# asm 2: pxor 80(<c=%rcx),<xmm5=%xmm5
pxor 80(%rcx),%xmm5

# qhasm:     shuffle bytes of xmm5 by SR
# asm 1: pshufb SR,<xmm5=int6464#6
# asm 2: pshufb SR,<xmm5=%xmm5
pshufb SR,%xmm5

# qhasm:     xmm6 ^= *(int128 *)(c + 96)
# asm 1: pxor 96(<c=int64#4),<xmm6=int6464#7
# asm 2: pxor 96(<c=%rcx),<xmm6=%xmm6
pxor 96(%rcx),%xmm6

# qhasm:     shuffle bytes of xmm6 by SR
# asm 1: pshufb SR,<xmm6=int6464#7
# asm 2: pshufb SR,<xmm6=%xmm6
pshufb SR,%xmm6

# qhasm:     xmm7 ^= *(int128 *)(c + 112)
# asm 1: pxor 112(<c=int64#4),<xmm7=int6464#8
# asm 2: pxor 112(<c=%rcx),<xmm7=%xmm7
pxor 112(%rcx),%xmm7

# qhasm:     shuffle bytes of xmm7 by SR
# asm 1: pshufb SR,<xmm7=int6464#8
# asm 2: pshufb SR,<xmm7=%xmm7
pshufb SR,%xmm7

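# Initial AddRoundKey: the first bitsliced round key (c + 0..112) has been
# XORed into the state, followed by ShiftRows as the byte permutation SR.
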
# qhasm:       xmm5 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm5=int6464#6
# asm 2: pxor  <xmm6=%xmm6,<xmm5=%xmm5
pxor  %xmm6,%xmm5

# qhasm:       xmm2 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm2=int6464#3
# asm 2: pxor  <xmm1=%xmm1,<xmm2=%xmm2
pxor  %xmm1,%xmm2

# qhasm:       xmm5 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm5=int6464#6
# asm 2: pxor  <xmm0=%xmm0,<xmm5=%xmm5
pxor  %xmm0,%xmm5

# qhasm:       xmm6 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm6=int6464#7
# asm 2: pxor  <xmm2=%xmm2,<xmm6=%xmm6
pxor  %xmm2,%xmm6

# qhasm:       xmm3 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm3=int6464#4
# asm 2: pxor  <xmm0=%xmm0,<xmm3=%xmm3
pxor  %xmm0,%xmm3

# qhasm:       xmm6 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm6=int6464#7
# asm 2: pxor  <xmm3=%xmm3,<xmm6=%xmm6
pxor  %xmm3,%xmm6

# qhasm:       xmm3 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm3=int6464#4
# asm 2: pxor  <xmm7=%xmm7,<xmm3=%xmm3
pxor  %xmm7,%xmm3

# qhasm:       xmm3 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm3=int6464#4
# asm 2: pxor  <xmm4=%xmm4,<xmm3=%xmm3
pxor  %xmm4,%xmm3

# qhasm:       xmm7 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm7=int6464#8
# asm 2: pxor  <xmm5=%xmm5,<xmm7=%xmm7
pxor  %xmm5,%xmm7

# qhasm:       xmm3 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm3=int6464#4
# asm 2: pxor  <xmm1=%xmm1,<xmm3=%xmm3
pxor  %xmm1,%xmm3

# qhasm:       xmm4 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm4=int6464#5
# asm 2: pxor  <xmm5=%xmm5,<xmm4=%xmm4
pxor  %xmm5,%xmm4

# qhasm:       xmm2 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm2=int6464#3
# asm 2: pxor  <xmm7=%xmm7,<xmm2=%xmm2
pxor  %xmm7,%xmm2

# qhasm:       xmm1 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm1=int6464#2
# asm 2: pxor  <xmm5=%xmm5,<xmm1=%xmm1
pxor  %xmm5,%xmm1

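# Start of the bitsliced S-box: the XOR network above is its linear top
# layer, computing shared subexpressions that feed the nonlinear core below
# (the S-box is evaluated as one Boolean circuit on all state bytes at once).
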
# qhasm:       xmm11 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
movdqa %xmm7,%xmm8

# qhasm:       xmm10 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
movdqa %xmm1,%xmm9

# qhasm:       xmm9 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
movdqa %xmm5,%xmm10

# qhasm:       xmm13 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
movdqa %xmm2,%xmm11

# qhasm:       xmm12 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
movdqa %xmm6,%xmm12

# qhasm:       xmm11 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#9
# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm8
pxor  %xmm4,%xmm8

# qhasm:       xmm10 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm10=int6464#10
# asm 2: pxor  <xmm2=%xmm2,<xmm10=%xmm9
pxor  %xmm2,%xmm9

# qhasm:       xmm9 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm9=int6464#11
# asm 2: pxor  <xmm3=%xmm3,<xmm9=%xmm10
pxor  %xmm3,%xmm10

# qhasm:       xmm13 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm13=int6464#12
# asm 2: pxor  <xmm4=%xmm4,<xmm13=%xmm11
pxor  %xmm4,%xmm11

# qhasm:       xmm12 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#13
# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm12
pxor  %xmm0,%xmm12

# qhasm:       xmm14 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
movdqa %xmm8,%xmm13

# qhasm:       xmm8 = xmm10
# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
movdqa %xmm9,%xmm14

# qhasm:       xmm15 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
movdqa %xmm8,%xmm15

# qhasm:       xmm10 |= xmm9
# asm 1: por   <xmm9=int6464#11,<xmm10=int6464#10
# asm 2: por   <xmm9=%xmm10,<xmm10=%xmm9
por   %xmm10,%xmm9

# qhasm:       xmm11 |= xmm12
# asm 1: por   <xmm12=int6464#13,<xmm11=int6464#9
# asm 2: por   <xmm12=%xmm12,<xmm11=%xmm8
por   %xmm12,%xmm8

# qhasm:       xmm15 ^= xmm8
# asm 1: pxor  <xmm8=int6464#15,<xmm15=int6464#16
# asm 2: pxor  <xmm8=%xmm14,<xmm15=%xmm15
pxor  %xmm14,%xmm15

# qhasm:       xmm14 &= xmm12
# asm 1: pand  <xmm12=int6464#13,<xmm14=int6464#14
# asm 2: pand  <xmm12=%xmm12,<xmm14=%xmm13
pand  %xmm12,%xmm13

# qhasm:       xmm8 &= xmm9
# asm 1: pand  <xmm9=int6464#11,<xmm8=int6464#15
# asm 2: pand  <xmm9=%xmm10,<xmm8=%xmm14
pand  %xmm10,%xmm14

# qhasm:       xmm12 ^= xmm9
# asm 1: pxor  <xmm9=int6464#11,<xmm12=int6464#13
# asm 2: pxor  <xmm9=%xmm10,<xmm12=%xmm12
pxor  %xmm10,%xmm12

# qhasm:       xmm15 &= xmm12
# asm 1: pand  <xmm12=int6464#13,<xmm15=int6464#16
# asm 2: pand  <xmm12=%xmm12,<xmm15=%xmm15
pand  %xmm12,%xmm15

# qhasm:       xmm12 = xmm3
# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
movdqa %xmm3,%xmm10

# qhasm:       xmm12 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#11
# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm10
pxor  %xmm0,%xmm10

# qhasm:       xmm13 &= xmm12
# asm 1: pand  <xmm12=int6464#11,<xmm13=int6464#12
# asm 2: pand  <xmm12=%xmm10,<xmm13=%xmm11
pand  %xmm10,%xmm11

# qhasm:       xmm11 ^= xmm13
# asm 1: pxor  <xmm13=int6464#12,<xmm11=int6464#9
# asm 2: pxor  <xmm13=%xmm11,<xmm11=%xmm8
pxor  %xmm11,%xmm8

# qhasm:       xmm10 ^= xmm13
# asm 1: pxor  <xmm13=int6464#12,<xmm10=int6464#10
# asm 2: pxor  <xmm13=%xmm11,<xmm10=%xmm9
pxor  %xmm11,%xmm9

# qhasm:       xmm13 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
movdqa %xmm7,%xmm10

# qhasm:       xmm13 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm13=int6464#11
# asm 2: pxor  <xmm1=%xmm1,<xmm13=%xmm10
pxor  %xmm1,%xmm10

# qhasm:       xmm12 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
movdqa %xmm5,%xmm11

# qhasm:       xmm9 = xmm13
# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
movdqa %xmm10,%xmm12

# qhasm:       xmm12 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm12=int6464#12
# asm 2: pxor  <xmm6=%xmm6,<xmm12=%xmm11
pxor  %xmm6,%xmm11

# qhasm:       xmm9 |= xmm12
# asm 1: por   <xmm12=int6464#12,<xmm9=int6464#13
# asm 2: por   <xmm12=%xmm11,<xmm9=%xmm12
por   %xmm11,%xmm12

# qhasm:       xmm13 &= xmm12
# asm 1: pand  <xmm12=int6464#12,<xmm13=int6464#11
# asm 2: pand  <xmm12=%xmm11,<xmm13=%xmm10
pand  %xmm11,%xmm10

# qhasm:       xmm8 ^= xmm13
# asm 1: pxor  <xmm13=int6464#11,<xmm8=int6464#15
# asm 2: pxor  <xmm13=%xmm10,<xmm8=%xmm14
pxor  %xmm10,%xmm14

# qhasm:       xmm11 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm11=int6464#9
# asm 2: pxor  <xmm15=%xmm15,<xmm11=%xmm8
pxor  %xmm15,%xmm8

# qhasm:       xmm10 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm10=int6464#10
# asm 2: pxor  <xmm14=%xmm13,<xmm10=%xmm9
pxor  %xmm13,%xmm9

# qhasm:       xmm9 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm9=int6464#13
# asm 2: pxor  <xmm15=%xmm15,<xmm9=%xmm12
pxor  %xmm15,%xmm12

# qhasm:       xmm8 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm8=int6464#15
# asm 2: pxor  <xmm14=%xmm13,<xmm8=%xmm14
pxor  %xmm13,%xmm14

# qhasm:       xmm9 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm9=int6464#13
# asm 2: pxor  <xmm14=%xmm13,<xmm9=%xmm12
pxor  %xmm13,%xmm12

# qhasm:       xmm12 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
movdqa %xmm2,%xmm10

# qhasm:       xmm13 = xmm4
# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
movdqa %xmm4,%xmm11

# qhasm:       xmm14 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
movdqa %xmm1,%xmm13

# qhasm:       xmm15 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
movdqa %xmm7,%xmm15

# qhasm:       xmm12 &= xmm3
# asm 1: pand  <xmm3=int6464#4,<xmm12=int6464#11
# asm 2: pand  <xmm3=%xmm3,<xmm12=%xmm10
pand  %xmm3,%xmm10

# qhasm:       xmm13 &= xmm0
# asm 1: pand  <xmm0=int6464#1,<xmm13=int6464#12
# asm 2: pand  <xmm0=%xmm0,<xmm13=%xmm11
pand  %xmm0,%xmm11

# qhasm:       xmm14 &= xmm5
# asm 1: pand  <xmm5=int6464#6,<xmm14=int6464#14
# asm 2: pand  <xmm5=%xmm5,<xmm14=%xmm13
pand  %xmm5,%xmm13

# qhasm:       xmm15 |= xmm6
# asm 1: por   <xmm6=int6464#7,<xmm15=int6464#16
# asm 2: por   <xmm6=%xmm6,<xmm15=%xmm15
por   %xmm6,%xmm15

# qhasm:       xmm11 ^= xmm12
# asm 1: pxor  <xmm12=int6464#11,<xmm11=int6464#9
# asm 2: pxor  <xmm12=%xmm10,<xmm11=%xmm8
pxor  %xmm10,%xmm8

# qhasm:       xmm10 ^= xmm13
# asm 1: pxor  <xmm13=int6464#12,<xmm10=int6464#10
# asm 2: pxor  <xmm13=%xmm11,<xmm10=%xmm9
pxor  %xmm11,%xmm9

# qhasm:       xmm9 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm9=int6464#13
# asm 2: pxor  <xmm14=%xmm13,<xmm9=%xmm12
pxor  %xmm13,%xmm12

# qhasm:       xmm8 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm8=int6464#15
# asm 2: pxor  <xmm15=%xmm15,<xmm8=%xmm14
pxor  %xmm15,%xmm14

# qhasm:       xmm12 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
movdqa %xmm8,%xmm10

# qhasm:       xmm12 ^= xmm10
# asm 1: pxor  <xmm10=int6464#10,<xmm12=int6464#11
# asm 2: pxor  <xmm10=%xmm9,<xmm12=%xmm10
pxor  %xmm9,%xmm10

# qhasm:       xmm11 &= xmm9
# asm 1: pand  <xmm9=int6464#13,<xmm11=int6464#9
# asm 2: pand  <xmm9=%xmm12,<xmm11=%xmm8
pand  %xmm12,%xmm8

# qhasm:       xmm14 = xmm8
# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
movdqa %xmm14,%xmm11

# qhasm:       xmm14 ^= xmm11
# asm 1: pxor  <xmm11=int6464#9,<xmm14=int6464#12
# asm 2: pxor  <xmm11=%xmm8,<xmm14=%xmm11
pxor  %xmm8,%xmm11

# qhasm:       xmm15 = xmm12
# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
movdqa %xmm10,%xmm13

# qhasm:       xmm15 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm15=int6464#14
# asm 2: pand  <xmm14=%xmm11,<xmm15=%xmm13
pand  %xmm11,%xmm13

# qhasm:       xmm15 ^= xmm10
# asm 1: pxor  <xmm10=int6464#10,<xmm15=int6464#14
# asm 2: pxor  <xmm10=%xmm9,<xmm15=%xmm13
pxor  %xmm9,%xmm13

# qhasm:       xmm13 = xmm9
# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
movdqa %xmm12,%xmm15

# qhasm:       xmm13 ^= xmm8
# asm 1: pxor  <xmm8=int6464#15,<xmm13=int6464#16
# asm 2: pxor  <xmm8=%xmm14,<xmm13=%xmm15
pxor  %xmm14,%xmm15

# qhasm:       xmm11 ^= xmm10
# asm 1: pxor  <xmm10=int6464#10,<xmm11=int6464#9
# asm 2: pxor  <xmm10=%xmm9,<xmm11=%xmm8
pxor  %xmm9,%xmm8

# qhasm:       xmm13 &= xmm11
# asm 1: pand  <xmm11=int6464#9,<xmm13=int6464#16
# asm 2: pand  <xmm11=%xmm8,<xmm13=%xmm15
pand  %xmm8,%xmm15

# qhasm:       xmm13 ^= xmm8
# asm 1: pxor  <xmm8=int6464#15,<xmm13=int6464#16
# asm 2: pxor  <xmm8=%xmm14,<xmm13=%xmm15
pxor  %xmm14,%xmm15

# qhasm:       xmm9 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm9=int6464#13
# asm 2: pxor  <xmm13=%xmm15,<xmm9=%xmm12
pxor  %xmm15,%xmm12

# qhasm:       xmm10 = xmm14
# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
movdqa %xmm11,%xmm8

# qhasm:       xmm10 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm10=int6464#9
# asm 2: pxor  <xmm13=%xmm15,<xmm10=%xmm8
pxor  %xmm15,%xmm8

# qhasm:       xmm10 &= xmm8
# asm 1: pand  <xmm8=int6464#15,<xmm10=int6464#9
# asm 2: pand  <xmm8=%xmm14,<xmm10=%xmm8
pand  %xmm14,%xmm8

# qhasm:       xmm9 ^= xmm10
# asm 1: pxor  <xmm10=int6464#9,<xmm9=int6464#13
# asm 2: pxor  <xmm10=%xmm8,<xmm9=%xmm12
pxor  %xmm8,%xmm12

# qhasm:       xmm14 ^= xmm10
# asm 1: pxor  <xmm10=int6464#9,<xmm14=int6464#12
# asm 2: pxor  <xmm10=%xmm8,<xmm14=%xmm11
pxor  %xmm8,%xmm11

# qhasm:       xmm14 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm14=int6464#12
# asm 2: pand  <xmm15=%xmm13,<xmm14=%xmm11
pand  %xmm13,%xmm11

# qhasm:       xmm14 ^= xmm12
# asm 1: pxor  <xmm12=int6464#11,<xmm14=int6464#12
# asm 2: pxor  <xmm12=%xmm10,<xmm14=%xmm11
pxor  %xmm10,%xmm11

# qhasm:         xmm12 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
movdqa %xmm6,%xmm8

# qhasm:         xmm8 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
movdqa %xmm5,%xmm9

# qhasm:           xmm10 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm10 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm10=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm10=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm10 &= xmm6
# asm 1: pand  <xmm6=int6464#7,<xmm10=int6464#11
# asm 2: pand  <xmm6=%xmm6,<xmm10=%xmm10
pand  %xmm6,%xmm10

# qhasm:           xmm6 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm6=int6464#7
# asm 2: pxor  <xmm5=%xmm5,<xmm6=%xmm6
pxor  %xmm5,%xmm6

# qhasm:           xmm6 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm6=int6464#7
# asm 2: pand  <xmm14=%xmm11,<xmm6=%xmm6
pand  %xmm11,%xmm6

# qhasm:           xmm5 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm5=int6464#6
# asm 2: pand  <xmm15=%xmm13,<xmm5=%xmm5
pand  %xmm13,%xmm5

# qhasm:           xmm6 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm6=int6464#7
# asm 2: pxor  <xmm5=%xmm5,<xmm6=%xmm6
pxor  %xmm5,%xmm6

# qhasm:           xmm5 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm5=int6464#6
# asm 2: pxor  <xmm10=%xmm10,<xmm5=%xmm5
pxor  %xmm10,%xmm5

# qhasm:         xmm12 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#9
# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm8
pxor  %xmm0,%xmm8

# qhasm:         xmm8 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm8=int6464#10
# asm 2: pxor  <xmm3=%xmm3,<xmm8=%xmm9
pxor  %xmm3,%xmm9

# qhasm:         xmm15 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm15=int6464#14
# asm 2: pxor  <xmm13=%xmm15,<xmm15=%xmm13
pxor  %xmm15,%xmm13

# qhasm:         xmm14 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm14=int6464#12
# asm 2: pxor  <xmm9=%xmm12,<xmm14=%xmm11
pxor  %xmm12,%xmm11

# qhasm:           xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm11 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm11 &= xmm12
# asm 1: pand  <xmm12=int6464#9,<xmm11=int6464#11
# asm 2: pand  <xmm12=%xmm8,<xmm11=%xmm10
pand  %xmm8,%xmm10

# qhasm:           xmm12 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm12=int6464#9
# asm 2: pxor  <xmm8=%xmm9,<xmm12=%xmm8
pxor  %xmm9,%xmm8

# qhasm:           xmm12 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm12=int6464#9
# asm 2: pand  <xmm14=%xmm11,<xmm12=%xmm8
pand  %xmm11,%xmm8

# qhasm:           xmm8 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm8=int6464#10
# asm 2: pand  <xmm15=%xmm13,<xmm8=%xmm9
pand  %xmm13,%xmm9

# qhasm:           xmm8 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm8=int6464#10
# asm 2: pxor  <xmm12=%xmm8,<xmm8=%xmm9
pxor  %xmm8,%xmm9

# qhasm:           xmm12 ^= xmm11
# asm 1: pxor  <xmm11=int6464#11,<xmm12=int6464#9
# asm 2: pxor  <xmm11=%xmm10,<xmm12=%xmm8
pxor  %xmm10,%xmm8

# qhasm:           xmm10 = xmm13
# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
movdqa %xmm15,%xmm10

# qhasm:           xmm10 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm10=int6464#11
# asm 2: pxor  <xmm9=%xmm12,<xmm10=%xmm10
pxor  %xmm12,%xmm10

# qhasm:           xmm10 &= xmm0
# asm 1: pand  <xmm0=int6464#1,<xmm10=int6464#11
# asm 2: pand  <xmm0=%xmm0,<xmm10=%xmm10
pand  %xmm0,%xmm10

# qhasm:           xmm0 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm0=int6464#1
# asm 2: pxor  <xmm3=%xmm3,<xmm0=%xmm0
pxor  %xmm3,%xmm0

# qhasm:           xmm0 &= xmm9
# asm 1: pand  <xmm9=int6464#13,<xmm0=int6464#1
# asm 2: pand  <xmm9=%xmm12,<xmm0=%xmm0
pand  %xmm12,%xmm0

# qhasm:           xmm3 &= xmm13
# asm 1: pand  <xmm13=int6464#16,<xmm3=int6464#4
# asm 2: pand  <xmm13=%xmm15,<xmm3=%xmm3
pand  %xmm15,%xmm3

# qhasm:           xmm0 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm0=int6464#1
# asm 2: pxor  <xmm3=%xmm3,<xmm0=%xmm0
pxor  %xmm3,%xmm0

# qhasm:           xmm3 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm3=int6464#4
# asm 2: pxor  <xmm10=%xmm10,<xmm3=%xmm3
pxor  %xmm10,%xmm3

# qhasm:         xmm6 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm6=int6464#7
# asm 2: pxor  <xmm12=%xmm8,<xmm6=%xmm6
pxor  %xmm8,%xmm6

# qhasm:         xmm0 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm12=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

# qhasm:         xmm5 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm5=int6464#6
# asm 2: pxor  <xmm8=%xmm9,<xmm5=%xmm5
pxor  %xmm9,%xmm5

# qhasm:         xmm3 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm3=int6464#4
# asm 2: pxor  <xmm8=%xmm9,<xmm3=%xmm3
pxor  %xmm9,%xmm3

# qhasm:         xmm12 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
movdqa %xmm7,%xmm8

# qhasm:         xmm8 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
movdqa %xmm1,%xmm9

# qhasm:         xmm12 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm12=int6464#9
# asm 2: pxor  <xmm4=%xmm4,<xmm12=%xmm8
pxor  %xmm4,%xmm8

# qhasm:         xmm8 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm8=int6464#10
# asm 2: pxor  <xmm2=%xmm2,<xmm8=%xmm9
pxor  %xmm2,%xmm9

# qhasm:           xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm11 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm11 &= xmm12
# asm 1: pand  <xmm12=int6464#9,<xmm11=int6464#11
# asm 2: pand  <xmm12=%xmm8,<xmm11=%xmm10
pand  %xmm8,%xmm10

# qhasm:           xmm12 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm12=int6464#9
# asm 2: pxor  <xmm8=%xmm9,<xmm12=%xmm8
pxor  %xmm9,%xmm8

# qhasm:           xmm12 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm12=int6464#9
# asm 2: pand  <xmm14=%xmm11,<xmm12=%xmm8
pand  %xmm11,%xmm8

# qhasm:           xmm8 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm8=int6464#10
# asm 2: pand  <xmm15=%xmm13,<xmm8=%xmm9
pand  %xmm13,%xmm9

# qhasm:           xmm8 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm8=int6464#10
# asm 2: pxor  <xmm12=%xmm8,<xmm8=%xmm9
pxor  %xmm8,%xmm9

# qhasm:           xmm12 ^= xmm11
# asm 1: pxor  <xmm11=int6464#11,<xmm12=int6464#9
# asm 2: pxor  <xmm11=%xmm10,<xmm12=%xmm8
pxor  %xmm10,%xmm8

# qhasm:           xmm10 = xmm13
# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
movdqa %xmm15,%xmm10

# qhasm:           xmm10 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm10=int6464#11
# asm 2: pxor  <xmm9=%xmm12,<xmm10=%xmm10
pxor  %xmm12,%xmm10

# qhasm:           xmm10 &= xmm4
# asm 1: pand  <xmm4=int6464#5,<xmm10=int6464#11
# asm 2: pand  <xmm4=%xmm4,<xmm10=%xmm10
pand  %xmm4,%xmm10

# qhasm:           xmm4 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm4=int6464#5
# asm 2: pxor  <xmm2=%xmm2,<xmm4=%xmm4
pxor  %xmm2,%xmm4

# qhasm:           xmm4 &= xmm9
# asm 1: pand  <xmm9=int6464#13,<xmm4=int6464#5
# asm 2: pand  <xmm9=%xmm12,<xmm4=%xmm4
pand  %xmm12,%xmm4

# qhasm:           xmm2 &= xmm13
# asm 1: pand  <xmm13=int6464#16,<xmm2=int6464#3
# asm 2: pand  <xmm13=%xmm15,<xmm2=%xmm2
pand  %xmm15,%xmm2

# qhasm:           xmm4 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm4=int6464#5
# asm 2: pxor  <xmm2=%xmm2,<xmm4=%xmm4
pxor  %xmm2,%xmm4

# qhasm:           xmm2 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm2=int6464#3
# asm 2: pxor  <xmm10=%xmm10,<xmm2=%xmm2
pxor  %xmm10,%xmm2

# qhasm:         xmm15 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm15=int6464#14
# asm 2: pxor  <xmm13=%xmm15,<xmm15=%xmm13
pxor  %xmm15,%xmm13

# qhasm:         xmm14 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm14=int6464#12
# asm 2: pxor  <xmm9=%xmm12,<xmm14=%xmm11
pxor  %xmm12,%xmm11

# qhasm:           xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm11 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm11 &= xmm7
# asm 1: pand  <xmm7=int6464#8,<xmm11=int6464#11
# asm 2: pand  <xmm7=%xmm7,<xmm11=%xmm10
pand  %xmm7,%xmm10

# qhasm:           xmm7 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm7=int6464#8
# asm 2: pxor  <xmm1=%xmm1,<xmm7=%xmm7
pxor  %xmm1,%xmm7

# qhasm:           xmm7 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm7=int6464#8
# asm 2: pand  <xmm14=%xmm11,<xmm7=%xmm7
pand  %xmm11,%xmm7

# qhasm:           xmm1 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm1=int6464#2
# asm 2: pand  <xmm15=%xmm13,<xmm1=%xmm1
pand  %xmm13,%xmm1

# qhasm:           xmm7 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm7=int6464#8
# asm 2: pxor  <xmm1=%xmm1,<xmm7=%xmm7
pxor  %xmm1,%xmm7

# qhasm:           xmm1 ^= xmm11
# asm 1: pxor  <xmm11=int6464#11,<xmm1=int6464#2
# asm 2: pxor  <xmm11=%xmm10,<xmm1=%xmm1
pxor  %xmm10,%xmm1

# qhasm:         xmm7 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm7=int6464#8
# asm 2: pxor  <xmm12=%xmm8,<xmm7=%xmm7
pxor  %xmm8,%xmm7

# qhasm:         xmm4 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm4=int6464#5
# asm 2: pxor  <xmm12=%xmm8,<xmm4=%xmm4
pxor  %xmm8,%xmm4

# qhasm:         xmm1 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm1=int6464#2
# asm 2: pxor  <xmm8=%xmm9,<xmm1=%xmm1
pxor  %xmm9,%xmm1

# qhasm:         xmm2 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm2=int6464#3
# asm 2: pxor  <xmm8=%xmm9,<xmm2=%xmm2
pxor  %xmm9,%xmm2

# qhasm:       xmm7 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm7=int6464#8
# asm 2: pxor  <xmm0=%xmm0,<xmm7=%xmm7
pxor  %xmm0,%xmm7

# qhasm:       xmm1 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm1=int6464#2
# asm 2: pxor  <xmm6=%xmm6,<xmm1=%xmm1
pxor  %xmm6,%xmm1

# qhasm:       xmm4 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm4=int6464#5
# asm 2: pxor  <xmm7=%xmm7,<xmm4=%xmm4
pxor  %xmm7,%xmm4

# qhasm:       xmm6 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm6=int6464#7
# asm 2: pxor  <xmm0=%xmm0,<xmm6=%xmm6
pxor  %xmm0,%xmm6

# qhasm:       xmm0 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm0=int6464#1
# asm 2: pxor  <xmm1=%xmm1,<xmm0=%xmm0
pxor  %xmm1,%xmm0

# qhasm:       xmm1 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm1=int6464#2
# asm 2: pxor  <xmm5=%xmm5,<xmm1=%xmm1
pxor  %xmm5,%xmm1

# qhasm:       xmm5 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm5=int6464#6
# asm 2: pxor  <xmm2=%xmm2,<xmm5=%xmm5
pxor  %xmm2,%xmm5

# qhasm:       xmm4 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm4=int6464#5
# asm 2: pxor  <xmm5=%xmm5,<xmm4=%xmm4
pxor  %xmm5,%xmm4

# qhasm:       xmm2 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm2=int6464#3
# asm 2: pxor  <xmm3=%xmm3,<xmm2=%xmm2
pxor  %xmm3,%xmm2

# qhasm:       xmm3 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm3=int6464#4
# asm 2: pxor  <xmm5=%xmm5,<xmm3=%xmm3
pxor  %xmm5,%xmm3

# qhasm:       xmm6 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm6=int6464#7
# asm 2: pxor  <xmm3=%xmm3,<xmm6=%xmm6
pxor  %xmm3,%xmm6

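# S-box finished; the substituted bit planes are back in xmm0..xmm7.
# MixColumns follows: pshufd $0x93 rotates each 128-bit block by one 32-bit
# word and pshufd $0x4E by two, and the surrounding XOR network combines
# these rotations to implement the MixColumns matrix in bitsliced form.
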
# qhasm:     xmm8 = shuffle dwords of xmm0 by 0x93
# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
pshufd $0x93,%xmm0,%xmm8

# qhasm:     xmm9 = shuffle dwords of xmm1 by 0x93
# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
pshufd $0x93,%xmm1,%xmm9

# qhasm:     xmm10 = shuffle dwords of xmm4 by 0x93
# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
pshufd $0x93,%xmm4,%xmm10

# qhasm:     xmm11 = shuffle dwords of xmm6 by 0x93
# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
pshufd $0x93,%xmm6,%xmm11

# qhasm:     xmm12 = shuffle dwords of xmm3 by 0x93
# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
pshufd $0x93,%xmm3,%xmm12

# qhasm:     xmm13 = shuffle dwords of xmm7 by 0x93
# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
pshufd $0x93,%xmm7,%xmm13

# qhasm:     xmm14 = shuffle dwords of xmm2 by 0x93
# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
pshufd $0x93,%xmm2,%xmm14

# qhasm:     xmm15 = shuffle dwords of xmm5 by 0x93
# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
pshufd $0x93,%xmm5,%xmm15

# qhasm:     xmm0 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm8=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

# qhasm:     xmm1 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm1=int6464#2
# asm 2: pxor  <xmm9=%xmm9,<xmm1=%xmm1
pxor  %xmm9,%xmm1

# qhasm:     xmm4 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm4=int6464#5
# asm 2: pxor  <xmm10=%xmm10,<xmm4=%xmm4
pxor  %xmm10,%xmm4

# qhasm:     xmm6 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm6=int6464#7
# asm 2: pxor  <xmm11=%xmm11,<xmm6=%xmm6
pxor  %xmm11,%xmm6

# qhasm:     xmm3 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#4
# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm3
pxor  %xmm12,%xmm3

# qhasm:     xmm7 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm7=int6464#8
# asm 2: pxor  <xmm13=%xmm13,<xmm7=%xmm7
pxor  %xmm13,%xmm7

# qhasm:     xmm2 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm2=int6464#3
# asm 2: pxor  <xmm14=%xmm14,<xmm2=%xmm2
pxor  %xmm14,%xmm2

# qhasm:     xmm5 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm5=int6464#6
# asm 2: pxor  <xmm15=%xmm15,<xmm5=%xmm5
pxor  %xmm15,%xmm5

# qhasm:     xmm8 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm8=int6464#9
# asm 2: pxor  <xmm5=%xmm5,<xmm8=%xmm8
pxor  %xmm5,%xmm8

# qhasm:     xmm9 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm9=int6464#10
# asm 2: pxor  <xmm0=%xmm0,<xmm9=%xmm9
pxor  %xmm0,%xmm9

# qhasm:     xmm10 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm10=int6464#11
# asm 2: pxor  <xmm1=%xmm1,<xmm10=%xmm10
pxor  %xmm1,%xmm10

# qhasm:     xmm9 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm9=int6464#10
# asm 2: pxor  <xmm5=%xmm5,<xmm9=%xmm9
pxor  %xmm5,%xmm9

# qhasm:     xmm11 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#12
# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm11
pxor  %xmm4,%xmm11

# qhasm:     xmm12 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm12=int6464#13
# asm 2: pxor  <xmm6=%xmm6,<xmm12=%xmm12
pxor  %xmm6,%xmm12

# qhasm:     xmm13 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm13=int6464#14
# asm 2: pxor  <xmm3=%xmm3,<xmm13=%xmm13
pxor  %xmm3,%xmm13

# qhasm:     xmm11 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm11=int6464#12
# asm 2: pxor  <xmm5=%xmm5,<xmm11=%xmm11
pxor  %xmm5,%xmm11

# qhasm:     xmm14 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm14=int6464#15
# asm 2: pxor  <xmm7=%xmm7,<xmm14=%xmm14
pxor  %xmm7,%xmm14

# qhasm:     xmm15 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm15=int6464#16
# asm 2: pxor  <xmm2=%xmm2,<xmm15=%xmm15
pxor  %xmm2,%xmm15

# qhasm:     xmm12 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm12=int6464#13
# asm 2: pxor  <xmm5=%xmm5,<xmm12=%xmm12
pxor  %xmm5,%xmm12

# qhasm:     xmm0 = shuffle dwords of xmm0 by 0x4E
# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
pshufd $0x4E,%xmm0,%xmm0

# qhasm:     xmm1 = shuffle dwords of xmm1 by 0x4E
# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
pshufd $0x4E,%xmm1,%xmm1

# qhasm:     xmm4 = shuffle dwords of xmm4 by 0x4E
# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
pshufd $0x4E,%xmm4,%xmm4

# qhasm:     xmm6 = shuffle dwords of xmm6 by 0x4E
# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
pshufd $0x4E,%xmm6,%xmm6

# qhasm:     xmm3 = shuffle dwords of xmm3 by 0x4E
# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
pshufd $0x4E,%xmm3,%xmm3

# qhasm:     xmm7 = shuffle dwords of xmm7 by 0x4E
# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
pshufd $0x4E,%xmm7,%xmm7

# qhasm:     xmm2 = shuffle dwords of xmm2 by 0x4E
# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
pshufd $0x4E,%xmm2,%xmm2

# qhasm:     xmm5 = shuffle dwords of xmm5 by 0x4E
# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
pshufd $0x4E,%xmm5,%xmm5

# qhasm:     xmm8 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm8=int6464#9
# asm 2: pxor  <xmm0=%xmm0,<xmm8=%xmm8
pxor  %xmm0,%xmm8

# qhasm:     xmm9 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm9=int6464#10
# asm 2: pxor  <xmm1=%xmm1,<xmm9=%xmm9
pxor  %xmm1,%xmm9

# qhasm:     xmm10 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm10=int6464#11
# asm 2: pxor  <xmm4=%xmm4,<xmm10=%xmm10
pxor  %xmm4,%xmm10

# qhasm:     xmm11 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm11=int6464#12
# asm 2: pxor  <xmm6=%xmm6,<xmm11=%xmm11
pxor  %xmm6,%xmm11

# qhasm:     xmm12 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm12=int6464#13
# asm 2: pxor  <xmm3=%xmm3,<xmm12=%xmm12
pxor  %xmm3,%xmm12

# qhasm:     xmm13 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm13=int6464#14
# asm 2: pxor  <xmm7=%xmm7,<xmm13=%xmm13
pxor  %xmm7,%xmm13

# qhasm:     xmm14 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm14=int6464#15
# asm 2: pxor  <xmm2=%xmm2,<xmm14=%xmm14
pxor  %xmm2,%xmm14

# qhasm:     xmm15 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm15=int6464#16
# asm 2: pxor  <xmm5=%xmm5,<xmm15=%xmm15
pxor  %xmm5,%xmm15

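# MixColumns output now sits in xmm8..xmm15. The next round's AddRoundKey
# XORs in the bitsliced round key at c + 128..240, again followed by
# ShiftRows via SR.
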
# qhasm:     xmm8 ^= *(int128 *)(c + 128)
# asm 1: pxor 128(<c=int64#4),<xmm8=int6464#9
# asm 2: pxor 128(<c=%rcx),<xmm8=%xmm8
pxor 128(%rcx),%xmm8

# qhasm:     shuffle bytes of xmm8 by SR
# asm 1: pshufb SR,<xmm8=int6464#9
# asm 2: pshufb SR,<xmm8=%xmm8
pshufb SR,%xmm8

# qhasm:     xmm9 ^= *(int128 *)(c + 144)
# asm 1: pxor 144(<c=int64#4),<xmm9=int6464#10
# asm 2: pxor 144(<c=%rcx),<xmm9=%xmm9
pxor 144(%rcx),%xmm9

# qhasm:     shuffle bytes of xmm9 by SR
# asm 1: pshufb SR,<xmm9=int6464#10
# asm 2: pshufb SR,<xmm9=%xmm9
pshufb SR,%xmm9

# qhasm:     xmm10 ^= *(int128 *)(c + 160)
# asm 1: pxor 160(<c=int64#4),<xmm10=int6464#11
# asm 2: pxor 160(<c=%rcx),<xmm10=%xmm10
pxor 160(%rcx),%xmm10

# qhasm:     shuffle bytes of xmm10 by SR
# asm 1: pshufb SR,<xmm10=int6464#11
# asm 2: pshufb SR,<xmm10=%xmm10
pshufb SR,%xmm10

# qhasm:     xmm11 ^= *(int128 *)(c + 176)
# asm 1: pxor 176(<c=int64#4),<xmm11=int6464#12
# asm 2: pxor 176(<c=%rcx),<xmm11=%xmm11
pxor 176(%rcx),%xmm11

# qhasm:     shuffle bytes of xmm11 by SR
# asm 1: pshufb SR,<xmm11=int6464#12
# asm 2: pshufb SR,<xmm11=%xmm11
pshufb SR,%xmm11

# qhasm:     xmm12 ^= *(int128 *)(c + 192)
# asm 1: pxor 192(<c=int64#4),<xmm12=int6464#13
# asm 2: pxor 192(<c=%rcx),<xmm12=%xmm12
pxor 192(%rcx),%xmm12

# qhasm:     shuffle bytes of xmm12 by SR
# asm 1: pshufb SR,<xmm12=int6464#13
# asm 2: pshufb SR,<xmm12=%xmm12
pshufb SR,%xmm12

# qhasm:     xmm13 ^= *(int128 *)(c + 208)
# asm 1: pxor 208(<c=int64#4),<xmm13=int6464#14
# asm 2: pxor 208(<c=%rcx),<xmm13=%xmm13
pxor 208(%rcx),%xmm13

# qhasm:     shuffle bytes of xmm13 by SR
# asm 1: pshufb SR,<xmm13=int6464#14
# asm 2: pshufb SR,<xmm13=%xmm13
pshufb SR,%xmm13

# qhasm:     xmm14 ^= *(int128 *)(c + 224)
# asm 1: pxor 224(<c=int64#4),<xmm14=int6464#15
# asm 2: pxor 224(<c=%rcx),<xmm14=%xmm14
pxor 224(%rcx),%xmm14

# qhasm:     shuffle bytes of xmm14 by SR
# asm 1: pshufb SR,<xmm14=int6464#15
# asm 2: pshufb SR,<xmm14=%xmm14
pshufb SR,%xmm14

# qhasm:     xmm15 ^= *(int128 *)(c + 240)
# asm 1: pxor 240(<c=int64#4),<xmm15=int6464#16
# asm 2: pxor 240(<c=%rcx),<xmm15=%xmm15
pxor 240(%rcx),%xmm15

# qhasm:     shuffle bytes of xmm15 by SR
# asm 1: pshufb SR,<xmm15=int6464#16
# asm 2: pshufb SR,<xmm15=%xmm15
pshufb SR,%xmm15

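# AddRoundKey and ShiftRows done for this round, state in xmm8..xmm15;
# the same S-box circuit is now evaluated again on these registers.
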
# qhasm:       xmm13 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm13=int6464#14
# asm 2: pxor  <xmm14=%xmm14,<xmm13=%xmm13
pxor  %xmm14,%xmm13

# qhasm:       xmm10 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm10=int6464#11
# asm 2: pxor  <xmm9=%xmm9,<xmm10=%xmm10
pxor  %xmm9,%xmm10

# qhasm:       xmm13 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm13=int6464#14
# asm 2: pxor  <xmm8=%xmm8,<xmm13=%xmm13
pxor  %xmm8,%xmm13

# qhasm:       xmm14 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm14=int6464#15
# asm 2: pxor  <xmm10=%xmm10,<xmm14=%xmm14
pxor  %xmm10,%xmm14

# qhasm:       xmm11 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm11=int6464#12
# asm 2: pxor  <xmm8=%xmm8,<xmm11=%xmm11
pxor  %xmm8,%xmm11

# qhasm:       xmm14 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm14=int6464#15
# asm 2: pxor  <xmm11=%xmm11,<xmm14=%xmm14
pxor  %xmm11,%xmm14

# qhasm:       xmm11 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm11=int6464#12
# asm 2: pxor  <xmm15=%xmm15,<xmm11=%xmm11
pxor  %xmm15,%xmm11

# qhasm:       xmm11 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm11=int6464#12
# asm 2: pxor  <xmm12=%xmm12,<xmm11=%xmm11
pxor  %xmm12,%xmm11

# qhasm:       xmm15 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm15=int6464#16
# asm 2: pxor  <xmm13=%xmm13,<xmm15=%xmm15
pxor  %xmm13,%xmm15

# qhasm:       xmm11 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm11=int6464#12
# asm 2: pxor  <xmm9=%xmm9,<xmm11=%xmm11
pxor  %xmm9,%xmm11

# qhasm:       xmm12 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm12=int6464#13
# asm 2: pxor  <xmm13=%xmm13,<xmm12=%xmm12
pxor  %xmm13,%xmm12

# qhasm:       xmm10 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm10=int6464#11
# asm 2: pxor  <xmm15=%xmm15,<xmm10=%xmm10
pxor  %xmm15,%xmm10

# qhasm:       xmm9 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm9=int6464#10
# asm 2: pxor  <xmm13=%xmm13,<xmm9=%xmm9
pxor  %xmm13,%xmm9

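# (Annotation, not generated output: from here the movdqa/pxor/pand/por
# network appears to compute the nonlinear core of the S-box, i.e.
# inversion in GF(2^8) expressed as a Boolean circuit over the eight bit
# slices. Temporaries are recycled aggressively, which is why the qhasm
# names and the physical registers diverge from this point on.)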
# qhasm:       xmm3 = xmm15
# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
movdqa %xmm15,%xmm0

# qhasm:       xmm2 = xmm9
# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
movdqa %xmm9,%xmm1

# qhasm:       xmm1 = xmm13
# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
movdqa %xmm13,%xmm2

# qhasm:       xmm5 = xmm10
# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
movdqa %xmm10,%xmm3

# qhasm:       xmm4 = xmm14
# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
movdqa %xmm14,%xmm4

# qhasm:       xmm3 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#1
# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm0
pxor  %xmm12,%xmm0

# qhasm:       xmm2 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm2=int6464#2
# asm 2: pxor  <xmm10=%xmm10,<xmm2=%xmm1
pxor  %xmm10,%xmm1

# qhasm:       xmm1 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm1=int6464#3
# asm 2: pxor  <xmm11=%xmm11,<xmm1=%xmm2
pxor  %xmm11,%xmm2

# qhasm:       xmm5 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm5=int6464#4
# asm 2: pxor  <xmm12=%xmm12,<xmm5=%xmm3
pxor  %xmm12,%xmm3

# qhasm:       xmm4 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#5
# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm4
pxor  %xmm8,%xmm4

# qhasm:       xmm6 = xmm3
# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
movdqa %xmm0,%xmm5

# qhasm:       xmm0 = xmm2
# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
movdqa %xmm1,%xmm6

# qhasm:       xmm7 = xmm3
# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
movdqa %xmm0,%xmm7

# qhasm:       xmm2 |= xmm1
# asm 1: por   <xmm1=int6464#3,<xmm2=int6464#2
# asm 2: por   <xmm1=%xmm2,<xmm2=%xmm1
por   %xmm2,%xmm1

# qhasm:       xmm3 |= xmm4
# asm 1: por   <xmm4=int6464#5,<xmm3=int6464#1
# asm 2: por   <xmm4=%xmm4,<xmm3=%xmm0
por   %xmm4,%xmm0

# qhasm:       xmm7 ^= xmm0
# asm 1: pxor  <xmm0=int6464#7,<xmm7=int6464#8
# asm 2: pxor  <xmm0=%xmm6,<xmm7=%xmm7
pxor  %xmm6,%xmm7

# qhasm:       xmm6 &= xmm4
# asm 1: pand  <xmm4=int6464#5,<xmm6=int6464#6
# asm 2: pand  <xmm4=%xmm4,<xmm6=%xmm5
pand  %xmm4,%xmm5

# qhasm:       xmm0 &= xmm1
# asm 1: pand  <xmm1=int6464#3,<xmm0=int6464#7
# asm 2: pand  <xmm1=%xmm2,<xmm0=%xmm6
pand  %xmm2,%xmm6

# qhasm:       xmm4 ^= xmm1
# asm 1: pxor  <xmm1=int6464#3,<xmm4=int6464#5
# asm 2: pxor  <xmm1=%xmm2,<xmm4=%xmm4
pxor  %xmm2,%xmm4

# qhasm:       xmm7 &= xmm4
# asm 1: pand  <xmm4=int6464#5,<xmm7=int6464#8
# asm 2: pand  <xmm4=%xmm4,<xmm7=%xmm7
pand  %xmm4,%xmm7

# qhasm:       xmm4 = xmm11
# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
movdqa %xmm11,%xmm2

# qhasm:       xmm4 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#3
# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm2
pxor  %xmm8,%xmm2

# qhasm:       xmm5 &= xmm4
# asm 1: pand  <xmm4=int6464#3,<xmm5=int6464#4
# asm 2: pand  <xmm4=%xmm2,<xmm5=%xmm3
pand  %xmm2,%xmm3

# qhasm:       xmm3 ^= xmm5
# asm 1: pxor  <xmm5=int6464#4,<xmm3=int6464#1
# asm 2: pxor  <xmm5=%xmm3,<xmm3=%xmm0
pxor  %xmm3,%xmm0

# qhasm:       xmm2 ^= xmm5
# asm 1: pxor  <xmm5=int6464#4,<xmm2=int6464#2
# asm 2: pxor  <xmm5=%xmm3,<xmm2=%xmm1
pxor  %xmm3,%xmm1

# qhasm:       xmm5 = xmm15
# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
movdqa %xmm15,%xmm2

# qhasm:       xmm5 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm5=int6464#3
# asm 2: pxor  <xmm9=%xmm9,<xmm5=%xmm2
pxor  %xmm9,%xmm2

# qhasm:       xmm4 = xmm13
# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
movdqa %xmm13,%xmm3

# qhasm:       xmm1 = xmm5
# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
movdqa %xmm2,%xmm4

# qhasm:       xmm4 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm4=int6464#4
# asm 2: pxor  <xmm14=%xmm14,<xmm4=%xmm3
pxor  %xmm14,%xmm3

# qhasm:       xmm1 |= xmm4
# asm 1: por   <xmm4=int6464#4,<xmm1=int6464#5
# asm 2: por   <xmm4=%xmm3,<xmm1=%xmm4
por   %xmm3,%xmm4

# qhasm:       xmm5 &= xmm4
# asm 1: pand  <xmm4=int6464#4,<xmm5=int6464#3
# asm 2: pand  <xmm4=%xmm3,<xmm5=%xmm2
pand  %xmm3,%xmm2

# qhasm:       xmm0 ^= xmm5
# asm 1: pxor  <xmm5=int6464#3,<xmm0=int6464#7
# asm 2: pxor  <xmm5=%xmm2,<xmm0=%xmm6
pxor  %xmm2,%xmm6

# qhasm:       xmm3 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm3=int6464#1
# asm 2: pxor  <xmm7=%xmm7,<xmm3=%xmm0
pxor  %xmm7,%xmm0

# qhasm:       xmm2 ^= xmm6
# asm 1: pxor  <xmm6=int6464#6,<xmm2=int6464#2
# asm 2: pxor  <xmm6=%xmm5,<xmm2=%xmm1
pxor  %xmm5,%xmm1

# qhasm:       xmm1 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm1=int6464#5
# asm 2: pxor  <xmm7=%xmm7,<xmm1=%xmm4
pxor  %xmm7,%xmm4

# qhasm:       xmm0 ^= xmm6
# asm 1: pxor  <xmm6=int6464#6,<xmm0=int6464#7
# asm 2: pxor  <xmm6=%xmm5,<xmm0=%xmm6
pxor  %xmm5,%xmm6

# qhasm:       xmm1 ^= xmm6
# asm 1: pxor  <xmm6=int6464#6,<xmm1=int6464#5
# asm 2: pxor  <xmm6=%xmm5,<xmm1=%xmm4
pxor  %xmm5,%xmm4

# qhasm:       xmm4 = xmm10
# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
movdqa %xmm10,%xmm2

# qhasm:       xmm5 = xmm12
# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
movdqa %xmm12,%xmm3

# qhasm:       xmm6 = xmm9
# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
movdqa %xmm9,%xmm5

# qhasm:       xmm7 = xmm15
# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
movdqa %xmm15,%xmm7

# qhasm:       xmm4 &= xmm11
# asm 1: pand  <xmm11=int6464#12,<xmm4=int6464#3
# asm 2: pand  <xmm11=%xmm11,<xmm4=%xmm2
pand  %xmm11,%xmm2

# qhasm:       xmm5 &= xmm8
# asm 1: pand  <xmm8=int6464#9,<xmm5=int6464#4
# asm 2: pand  <xmm8=%xmm8,<xmm5=%xmm3
pand  %xmm8,%xmm3

# qhasm:       xmm6 &= xmm13
# asm 1: pand  <xmm13=int6464#14,<xmm6=int6464#6
# asm 2: pand  <xmm13=%xmm13,<xmm6=%xmm5
pand  %xmm13,%xmm5

# qhasm:       xmm7 |= xmm14
# asm 1: por   <xmm14=int6464#15,<xmm7=int6464#8
# asm 2: por   <xmm14=%xmm14,<xmm7=%xmm7
por   %xmm14,%xmm7

# qhasm:       xmm3 ^= xmm4
# asm 1: pxor  <xmm4=int6464#3,<xmm3=int6464#1
# asm 2: pxor  <xmm4=%xmm2,<xmm3=%xmm0
pxor  %xmm2,%xmm0

# qhasm:       xmm2 ^= xmm5
# asm 1: pxor  <xmm5=int6464#4,<xmm2=int6464#2
# asm 2: pxor  <xmm5=%xmm3,<xmm2=%xmm1
pxor  %xmm3,%xmm1

# qhasm:       xmm1 ^= xmm6
# asm 1: pxor  <xmm6=int6464#6,<xmm1=int6464#5
# asm 2: pxor  <xmm6=%xmm5,<xmm1=%xmm4
pxor  %xmm5,%xmm4

# qhasm:       xmm0 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm0=int6464#7
# asm 2: pxor  <xmm7=%xmm7,<xmm0=%xmm6
pxor  %xmm7,%xmm6

# qhasm:       xmm4 = xmm3
# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
movdqa %xmm0,%xmm2

# qhasm:       xmm4 ^= xmm2
# asm 1: pxor  <xmm2=int6464#2,<xmm4=int6464#3
# asm 2: pxor  <xmm2=%xmm1,<xmm4=%xmm2
pxor  %xmm1,%xmm2

# qhasm:       xmm3 &= xmm1
# asm 1: pand  <xmm1=int6464#5,<xmm3=int6464#1
# asm 2: pand  <xmm1=%xmm4,<xmm3=%xmm0
pand  %xmm4,%xmm0

# qhasm:       xmm6 = xmm0
# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
movdqa %xmm6,%xmm3

# qhasm:       xmm6 ^= xmm3
# asm 1: pxor  <xmm3=int6464#1,<xmm6=int6464#4
# asm 2: pxor  <xmm3=%xmm0,<xmm6=%xmm3
pxor  %xmm0,%xmm3

# qhasm:       xmm7 = xmm4
# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
movdqa %xmm2,%xmm5

# qhasm:       xmm7 &= xmm6
# asm 1: pand  <xmm6=int6464#4,<xmm7=int6464#6
# asm 2: pand  <xmm6=%xmm3,<xmm7=%xmm5
pand  %xmm3,%xmm5

# qhasm:       xmm7 ^= xmm2
# asm 1: pxor  <xmm2=int6464#2,<xmm7=int6464#6
# asm 2: pxor  <xmm2=%xmm1,<xmm7=%xmm5
pxor  %xmm1,%xmm5

# qhasm:       xmm5 = xmm1
# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
movdqa %xmm4,%xmm7

# qhasm:       xmm5 ^= xmm0
# asm 1: pxor  <xmm0=int6464#7,<xmm5=int6464#8
# asm 2: pxor  <xmm0=%xmm6,<xmm5=%xmm7
pxor  %xmm6,%xmm7

# qhasm:       xmm3 ^= xmm2
# asm 1: pxor  <xmm2=int6464#2,<xmm3=int6464#1
# asm 2: pxor  <xmm2=%xmm1,<xmm3=%xmm0
pxor  %xmm1,%xmm0

# qhasm:       xmm5 &= xmm3
# asm 1: pand  <xmm3=int6464#1,<xmm5=int6464#8
# asm 2: pand  <xmm3=%xmm0,<xmm5=%xmm7
pand  %xmm0,%xmm7

# qhasm:       xmm5 ^= xmm0
# asm 1: pxor  <xmm0=int6464#7,<xmm5=int6464#8
# asm 2: pxor  <xmm0=%xmm6,<xmm5=%xmm7
pxor  %xmm6,%xmm7

# qhasm:       xmm1 ^= xmm5
# asm 1: pxor  <xmm5=int6464#8,<xmm1=int6464#5
# asm 2: pxor  <xmm5=%xmm7,<xmm1=%xmm4
pxor  %xmm7,%xmm4

# qhasm:       xmm2 = xmm6
# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
movdqa %xmm3,%xmm0

# qhasm:       xmm2 ^= xmm5
# asm 1: pxor  <xmm5=int6464#8,<xmm2=int6464#1
# asm 2: pxor  <xmm5=%xmm7,<xmm2=%xmm0
pxor  %xmm7,%xmm0

# qhasm:       xmm2 &= xmm0
# asm 1: pand  <xmm0=int6464#7,<xmm2=int6464#1
# asm 2: pand  <xmm0=%xmm6,<xmm2=%xmm0
pand  %xmm6,%xmm0

# qhasm:       xmm1 ^= xmm2
# asm 1: pxor  <xmm2=int6464#1,<xmm1=int6464#5
# asm 2: pxor  <xmm2=%xmm0,<xmm1=%xmm4
pxor  %xmm0,%xmm4

# qhasm:       xmm6 ^= xmm2
# asm 1: pxor  <xmm2=int6464#1,<xmm6=int6464#4
# asm 2: pxor  <xmm2=%xmm0,<xmm6=%xmm3
pxor  %xmm0,%xmm3

# qhasm:       xmm6 &= xmm7
# asm 1: pand  <xmm7=int6464#6,<xmm6=int6464#4
# asm 2: pand  <xmm7=%xmm5,<xmm6=%xmm3
pand  %xmm5,%xmm3

# qhasm:       xmm6 ^= xmm4
# asm 1: pxor  <xmm4=int6464#3,<xmm6=int6464#4
# asm 2: pxor  <xmm4=%xmm2,<xmm6=%xmm3
pxor  %xmm2,%xmm3

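# (Annotation, not generated output: the more deeply indented qhasm
# lines that follow seem to be the subfield steps of the inversion,
# combining pairs of slices through shared AND/OR masks. This reading is
# an interpretation and is not part of the generated annotations.)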
# qhasm:         xmm4 = xmm14
# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
movdqa %xmm14,%xmm0

# qhasm:         xmm0 = xmm13
# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
movdqa %xmm13,%xmm1

# qhasm:           xmm2 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
movdqa %xmm5,%xmm2

# qhasm:           xmm2 ^= xmm6
# asm 1: pxor  <xmm6=int6464#4,<xmm2=int6464#3
# asm 2: pxor  <xmm6=%xmm3,<xmm2=%xmm2
pxor  %xmm3,%xmm2

# qhasm:           xmm2 &= xmm14
# asm 1: pand  <xmm14=int6464#15,<xmm2=int6464#3
# asm 2: pand  <xmm14=%xmm14,<xmm2=%xmm2
pand  %xmm14,%xmm2

# qhasm:           xmm14 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm14=int6464#15
# asm 2: pxor  <xmm13=%xmm13,<xmm14=%xmm14
pxor  %xmm13,%xmm14

# qhasm:           xmm14 &= xmm6
# asm 1: pand  <xmm6=int6464#4,<xmm14=int6464#15
# asm 2: pand  <xmm6=%xmm3,<xmm14=%xmm14
pand  %xmm3,%xmm14

# qhasm:           xmm13 &= xmm7
# asm 1: pand  <xmm7=int6464#6,<xmm13=int6464#14
# asm 2: pand  <xmm7=%xmm5,<xmm13=%xmm13
pand  %xmm5,%xmm13

# qhasm:           xmm14 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm14=int6464#15
# asm 2: pxor  <xmm13=%xmm13,<xmm14=%xmm14
pxor  %xmm13,%xmm14

# qhasm:           xmm13 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm13=int6464#14
# asm 2: pxor  <xmm2=%xmm2,<xmm13=%xmm13
pxor  %xmm2,%xmm13

# qhasm:         xmm4 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#1
# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm0
pxor  %xmm8,%xmm0

# qhasm:         xmm0 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm0=int6464#2
# asm 2: pxor  <xmm11=%xmm11,<xmm0=%xmm1
pxor  %xmm11,%xmm1

# qhasm:         xmm7 ^= xmm5
# asm 1: pxor  <xmm5=int6464#8,<xmm7=int6464#6
# asm 2: pxor  <xmm5=%xmm7,<xmm7=%xmm5
pxor  %xmm7,%xmm5

# qhasm:         xmm6 ^= xmm1
# asm 1: pxor  <xmm1=int6464#5,<xmm6=int6464#4
# asm 2: pxor  <xmm1=%xmm4,<xmm6=%xmm3
pxor  %xmm4,%xmm3

# qhasm:           xmm3 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
movdqa %xmm5,%xmm2

# qhasm:           xmm3 ^= xmm6
# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
pxor  %xmm3,%xmm2

# qhasm:           xmm3 &= xmm4
# asm 1: pand  <xmm4=int6464#1,<xmm3=int6464#3
# asm 2: pand  <xmm4=%xmm0,<xmm3=%xmm2
pand  %xmm0,%xmm2

# qhasm:           xmm4 ^= xmm0
# asm 1: pxor  <xmm0=int6464#2,<xmm4=int6464#1
# asm 2: pxor  <xmm0=%xmm1,<xmm4=%xmm0
pxor  %xmm1,%xmm0

# qhasm:           xmm4 &= xmm6
# asm 1: pand  <xmm6=int6464#4,<xmm4=int6464#1
# asm 2: pand  <xmm6=%xmm3,<xmm4=%xmm0
pand  %xmm3,%xmm0

# qhasm:           xmm0 &= xmm7
# asm 1: pand  <xmm7=int6464#6,<xmm0=int6464#2
# asm 2: pand  <xmm7=%xmm5,<xmm0=%xmm1
pand  %xmm5,%xmm1

# qhasm:           xmm0 ^= xmm4
# asm 1: pxor  <xmm4=int6464#1,<xmm0=int6464#2
# asm 2: pxor  <xmm4=%xmm0,<xmm0=%xmm1
pxor  %xmm0,%xmm1

# qhasm:           xmm4 ^= xmm3
# asm 1: pxor  <xmm3=int6464#3,<xmm4=int6464#1
# asm 2: pxor  <xmm3=%xmm2,<xmm4=%xmm0
pxor  %xmm2,%xmm0

# qhasm:           xmm2 = xmm5
# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
movdqa %xmm7,%xmm2

# qhasm:           xmm2 ^= xmm1
# asm 1: pxor  <xmm1=int6464#5,<xmm2=int6464#3
# asm 2: pxor  <xmm1=%xmm4,<xmm2=%xmm2
pxor  %xmm4,%xmm2

# qhasm:           xmm2 &= xmm8
# asm 1: pand  <xmm8=int6464#9,<xmm2=int6464#3
# asm 2: pand  <xmm8=%xmm8,<xmm2=%xmm2
pand  %xmm8,%xmm2

# qhasm:           xmm8 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm8=int6464#9
# asm 2: pxor  <xmm11=%xmm11,<xmm8=%xmm8
pxor  %xmm11,%xmm8

# qhasm:           xmm8 &= xmm1
# asm 1: pand  <xmm1=int6464#5,<xmm8=int6464#9
# asm 2: pand  <xmm1=%xmm4,<xmm8=%xmm8
pand  %xmm4,%xmm8

# qhasm:           xmm11 &= xmm5
# asm 1: pand  <xmm5=int6464#8,<xmm11=int6464#12
# asm 2: pand  <xmm5=%xmm7,<xmm11=%xmm11
pand  %xmm7,%xmm11

# qhasm:           xmm8 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm8=int6464#9
# asm 2: pxor  <xmm11=%xmm11,<xmm8=%xmm8
pxor  %xmm11,%xmm8

# qhasm:           xmm11 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm11=int6464#12
# asm 2: pxor  <xmm2=%xmm2,<xmm11=%xmm11
pxor  %xmm2,%xmm11

# qhasm:         xmm14 ^= xmm4
# asm 1: pxor  <xmm4=int6464#1,<xmm14=int6464#15
# asm 2: pxor  <xmm4=%xmm0,<xmm14=%xmm14
pxor  %xmm0,%xmm14

# qhasm:         xmm8 ^= xmm4
# asm 1: pxor  <xmm4=int6464#1,<xmm8=int6464#9
# asm 2: pxor  <xmm4=%xmm0,<xmm8=%xmm8
pxor  %xmm0,%xmm8

# qhasm:         xmm13 ^= xmm0
# asm 1: pxor  <xmm0=int6464#2,<xmm13=int6464#14
# asm 2: pxor  <xmm0=%xmm1,<xmm13=%xmm13
pxor  %xmm1,%xmm13

# qhasm:         xmm11 ^= xmm0
# asm 1: pxor  <xmm0=int6464#2,<xmm11=int6464#12
# asm 2: pxor  <xmm0=%xmm1,<xmm11=%xmm11
pxor  %xmm1,%xmm11

# qhasm:         xmm4 = xmm15
# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
movdqa %xmm15,%xmm0

# qhasm:         xmm0 = xmm9
# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
movdqa %xmm9,%xmm1

# qhasm:         xmm4 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm4=int6464#1
# asm 2: pxor  <xmm12=%xmm12,<xmm4=%xmm0
pxor  %xmm12,%xmm0

# qhasm:         xmm0 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm0=int6464#2
# asm 2: pxor  <xmm10=%xmm10,<xmm0=%xmm1
pxor  %xmm10,%xmm1

# qhasm:           xmm3 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
movdqa %xmm5,%xmm2

# qhasm:           xmm3 ^= xmm6
# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
pxor  %xmm3,%xmm2

# qhasm:           xmm3 &= xmm4
# asm 1: pand  <xmm4=int6464#1,<xmm3=int6464#3
# asm 2: pand  <xmm4=%xmm0,<xmm3=%xmm2
pand  %xmm0,%xmm2

# qhasm:           xmm4 ^= xmm0
# asm 1: pxor  <xmm0=int6464#2,<xmm4=int6464#1
# asm 2: pxor  <xmm0=%xmm1,<xmm4=%xmm0
pxor  %xmm1,%xmm0

# qhasm:           xmm4 &= xmm6
# asm 1: pand  <xmm6=int6464#4,<xmm4=int6464#1
# asm 2: pand  <xmm6=%xmm3,<xmm4=%xmm0
pand  %xmm3,%xmm0

# qhasm:           xmm0 &= xmm7
# asm 1: pand  <xmm7=int6464#6,<xmm0=int6464#2
# asm 2: pand  <xmm7=%xmm5,<xmm0=%xmm1
pand  %xmm5,%xmm1

# qhasm:           xmm0 ^= xmm4
# asm 1: pxor  <xmm4=int6464#1,<xmm0=int6464#2
# asm 2: pxor  <xmm4=%xmm0,<xmm0=%xmm1
pxor  %xmm0,%xmm1

# qhasm:           xmm4 ^= xmm3
# asm 1: pxor  <xmm3=int6464#3,<xmm4=int6464#1
# asm 2: pxor  <xmm3=%xmm2,<xmm4=%xmm0
pxor  %xmm2,%xmm0

# qhasm:           xmm2 = xmm5
# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
movdqa %xmm7,%xmm2

# qhasm:           xmm2 ^= xmm1
# asm 1: pxor  <xmm1=int6464#5,<xmm2=int6464#3
# asm 2: pxor  <xmm1=%xmm4,<xmm2=%xmm2
pxor  %xmm4,%xmm2

# qhasm:           xmm2 &= xmm12
# asm 1: pand  <xmm12=int6464#13,<xmm2=int6464#3
# asm 2: pand  <xmm12=%xmm12,<xmm2=%xmm2
pand  %xmm12,%xmm2

# qhasm:           xmm12 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm12=int6464#13
# asm 2: pxor  <xmm10=%xmm10,<xmm12=%xmm12
pxor  %xmm10,%xmm12

# qhasm:           xmm12 &= xmm1
# asm 1: pand  <xmm1=int6464#5,<xmm12=int6464#13
# asm 2: pand  <xmm1=%xmm4,<xmm12=%xmm12
pand  %xmm4,%xmm12

# qhasm:           xmm10 &= xmm5
# asm 1: pand  <xmm5=int6464#8,<xmm10=int6464#11
# asm 2: pand  <xmm5=%xmm7,<xmm10=%xmm10
pand  %xmm7,%xmm10

# qhasm:           xmm12 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm12=int6464#13
# asm 2: pxor  <xmm10=%xmm10,<xmm12=%xmm12
pxor  %xmm10,%xmm12

# qhasm:           xmm10 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm10=int6464#11
# asm 2: pxor  <xmm2=%xmm2,<xmm10=%xmm10
pxor  %xmm2,%xmm10

# qhasm:         xmm7 ^= xmm5
# asm 1: pxor  <xmm5=int6464#8,<xmm7=int6464#6
# asm 2: pxor  <xmm5=%xmm7,<xmm7=%xmm5
pxor  %xmm7,%xmm5

# qhasm:         xmm6 ^= xmm1
# asm 1: pxor  <xmm1=int6464#5,<xmm6=int6464#4
# asm 2: pxor  <xmm1=%xmm4,<xmm6=%xmm3
pxor  %xmm4,%xmm3

# qhasm:           xmm3 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
movdqa %xmm5,%xmm2

# qhasm:           xmm3 ^= xmm6
# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
pxor  %xmm3,%xmm2

# qhasm:           xmm3 &= xmm15
# asm 1: pand  <xmm15=int6464#16,<xmm3=int6464#3
# asm 2: pand  <xmm15=%xmm15,<xmm3=%xmm2
pand  %xmm15,%xmm2

# qhasm:           xmm15 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm15=int6464#16
# asm 2: pxor  <xmm9=%xmm9,<xmm15=%xmm15
pxor  %xmm9,%xmm15

# qhasm:           xmm15 &= xmm6
# asm 1: pand  <xmm6=int6464#4,<xmm15=int6464#16
# asm 2: pand  <xmm6=%xmm3,<xmm15=%xmm15
pand  %xmm3,%xmm15

# qhasm:           xmm9 &= xmm7
# asm 1: pand  <xmm7=int6464#6,<xmm9=int6464#10
# asm 2: pand  <xmm7=%xmm5,<xmm9=%xmm9
pand  %xmm5,%xmm9

# qhasm:           xmm15 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm15=int6464#16
# asm 2: pxor  <xmm9=%xmm9,<xmm15=%xmm15
pxor  %xmm9,%xmm15

# qhasm:           xmm9 ^= xmm3
# asm 1: pxor  <xmm3=int6464#3,<xmm9=int6464#10
# asm 2: pxor  <xmm3=%xmm2,<xmm9=%xmm9
pxor  %xmm2,%xmm9

# qhasm:         xmm15 ^= xmm4
# asm 1: pxor  <xmm4=int6464#1,<xmm15=int6464#16
# asm 2: pxor  <xmm4=%xmm0,<xmm15=%xmm15
pxor  %xmm0,%xmm15

# qhasm:         xmm12 ^= xmm4
# asm 1: pxor  <xmm4=int6464#1,<xmm12=int6464#13
# asm 2: pxor  <xmm4=%xmm0,<xmm12=%xmm12
pxor  %xmm0,%xmm12

# qhasm:         xmm9 ^= xmm0
# asm 1: pxor  <xmm0=int6464#2,<xmm9=int6464#10
# asm 2: pxor  <xmm0=%xmm1,<xmm9=%xmm9
pxor  %xmm1,%xmm9

# qhasm:         xmm10 ^= xmm0
# asm 1: pxor  <xmm0=int6464#2,<xmm10=int6464#11
# asm 2: pxor  <xmm0=%xmm1,<xmm10=%xmm10
pxor  %xmm1,%xmm10

# qhasm:       xmm15 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm15=int6464#16
# asm 2: pxor  <xmm8=%xmm8,<xmm15=%xmm15
pxor  %xmm8,%xmm15

# qhasm:       xmm9 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm9=int6464#10
# asm 2: pxor  <xmm14=%xmm14,<xmm9=%xmm9
pxor  %xmm14,%xmm9

# qhasm:       xmm12 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm12=int6464#13
# asm 2: pxor  <xmm15=%xmm15,<xmm12=%xmm12
pxor  %xmm15,%xmm12

# qhasm:       xmm14 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm14=int6464#15
# asm 2: pxor  <xmm8=%xmm8,<xmm14=%xmm14
pxor  %xmm8,%xmm14

# qhasm:       xmm8 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm8=int6464#9
# asm 2: pxor  <xmm9=%xmm9,<xmm8=%xmm8
pxor  %xmm9,%xmm8

# qhasm:       xmm9 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm9=int6464#10
# asm 2: pxor  <xmm13=%xmm13,<xmm9=%xmm9
pxor  %xmm13,%xmm9

# qhasm:       xmm13 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm13=int6464#14
# asm 2: pxor  <xmm10=%xmm10,<xmm13=%xmm13
pxor  %xmm10,%xmm13

# qhasm:       xmm12 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm12=int6464#13
# asm 2: pxor  <xmm13=%xmm13,<xmm12=%xmm12
pxor  %xmm13,%xmm12

# qhasm:       xmm10 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm10=int6464#11
# asm 2: pxor  <xmm11=%xmm11,<xmm10=%xmm10
pxor  %xmm11,%xmm10

# qhasm:       xmm11 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm11=int6464#12
# asm 2: pxor  <xmm13=%xmm13,<xmm11=%xmm11
pxor  %xmm13,%xmm11

# qhasm:       xmm14 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm14=int6464#15
# asm 2: pxor  <xmm11=%xmm11,<xmm14=%xmm14
pxor  %xmm11,%xmm14

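# (Annotation, not generated output: MixColumns, presumably. pshufd
# $0x93 rotates each 128-bit slice by one 32-bit word, and the xors
# below accumulate the rotated copies into the state.)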
# qhasm:     xmm0 = shuffle dwords of xmm8 by 0x93
# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
pshufd $0x93,%xmm8,%xmm0

# qhasm:     xmm1 = shuffle dwords of xmm9 by 0x93
# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
pshufd $0x93,%xmm9,%xmm1

# qhasm:     xmm2 = shuffle dwords of xmm12 by 0x93
# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
pshufd $0x93,%xmm12,%xmm2

# qhasm:     xmm3 = shuffle dwords of xmm14 by 0x93
# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
pshufd $0x93,%xmm14,%xmm3

# qhasm:     xmm4 = shuffle dwords of xmm11 by 0x93
# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
pshufd $0x93,%xmm11,%xmm4

# qhasm:     xmm5 = shuffle dwords of xmm15 by 0x93
# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
pshufd $0x93,%xmm15,%xmm5

# qhasm:     xmm6 = shuffle dwords of xmm10 by 0x93
# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
pshufd $0x93,%xmm10,%xmm6

# qhasm:     xmm7 = shuffle dwords of xmm13 by 0x93
# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
pshufd $0x93,%xmm13,%xmm7

# qhasm:     xmm8 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm8=int6464#9
# asm 2: pxor  <xmm0=%xmm0,<xmm8=%xmm8
pxor  %xmm0,%xmm8

# qhasm:     xmm9 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm9=int6464#10
# asm 2: pxor  <xmm1=%xmm1,<xmm9=%xmm9
pxor  %xmm1,%xmm9

# qhasm:     xmm12 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm12=int6464#13
# asm 2: pxor  <xmm2=%xmm2,<xmm12=%xmm12
pxor  %xmm2,%xmm12

# qhasm:     xmm14 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm14=int6464#15
# asm 2: pxor  <xmm3=%xmm3,<xmm14=%xmm14
pxor  %xmm3,%xmm14

# qhasm:     xmm11 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#12
# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm11
pxor  %xmm4,%xmm11

# qhasm:     xmm15 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm15=int6464#16
# asm 2: pxor  <xmm5=%xmm5,<xmm15=%xmm15
pxor  %xmm5,%xmm15

# qhasm:     xmm10 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm10=int6464#11
# asm 2: pxor  <xmm6=%xmm6,<xmm10=%xmm10
pxor  %xmm6,%xmm10

# qhasm:     xmm13 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm13=int6464#14
# asm 2: pxor  <xmm7=%xmm7,<xmm13=%xmm13
pxor  %xmm7,%xmm13

# qhasm:     xmm0 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm0=int6464#1
# asm 2: pxor  <xmm13=%xmm13,<xmm0=%xmm0
pxor  %xmm13,%xmm0

# qhasm:     xmm1 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm1=int6464#2
# asm 2: pxor  <xmm8=%xmm8,<xmm1=%xmm1
pxor  %xmm8,%xmm1

# qhasm:     xmm2 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm2=int6464#3
# asm 2: pxor  <xmm9=%xmm9,<xmm2=%xmm2
pxor  %xmm9,%xmm2

# qhasm:     xmm1 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm1=int6464#2
# asm 2: pxor  <xmm13=%xmm13,<xmm1=%xmm1
pxor  %xmm13,%xmm1

# qhasm:     xmm3 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#4
# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm3
pxor  %xmm12,%xmm3

# qhasm:     xmm4 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm4=int6464#5
# asm 2: pxor  <xmm14=%xmm14,<xmm4=%xmm4
pxor  %xmm14,%xmm4

# qhasm:     xmm5 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm5=int6464#6
# asm 2: pxor  <xmm11=%xmm11,<xmm5=%xmm5
pxor  %xmm11,%xmm5

# qhasm:     xmm3 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm3=int6464#4
# asm 2: pxor  <xmm13=%xmm13,<xmm3=%xmm3
pxor  %xmm13,%xmm3

# qhasm:     xmm6 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm6=int6464#7
# asm 2: pxor  <xmm15=%xmm15,<xmm6=%xmm6
pxor  %xmm15,%xmm6

# qhasm:     xmm7 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm7=int6464#8
# asm 2: pxor  <xmm10=%xmm10,<xmm7=%xmm7
pxor  %xmm10,%xmm7

# qhasm:     xmm4 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm4=int6464#5
# asm 2: pxor  <xmm13=%xmm13,<xmm4=%xmm4
pxor  %xmm13,%xmm4

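# (Annotation, not generated output: second half of the MixColumns
# pattern, presumably. pshufd $0x4E rotates each slice by two 32-bit
# words before the final xor accumulation.)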
# qhasm:     xmm8 = shuffle dwords of xmm8 by 0x4E
# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
pshufd $0x4E,%xmm8,%xmm8

# qhasm:     xmm9 = shuffle dwords of xmm9 by 0x4E
# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
pshufd $0x4E,%xmm9,%xmm9

# qhasm:     xmm12 = shuffle dwords of xmm12 by 0x4E
# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
pshufd $0x4E,%xmm12,%xmm12

# qhasm:     xmm14 = shuffle dwords of xmm14 by 0x4E
# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
pshufd $0x4E,%xmm14,%xmm14

# qhasm:     xmm11 = shuffle dwords of xmm11 by 0x4E
# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
pshufd $0x4E,%xmm11,%xmm11

# qhasm:     xmm15 = shuffle dwords of xmm15 by 0x4E
# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
pshufd $0x4E,%xmm15,%xmm15

# qhasm:     xmm10 = shuffle dwords of xmm10 by 0x4E
# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
pshufd $0x4E,%xmm10,%xmm10

# qhasm:     xmm13 = shuffle dwords of xmm13 by 0x4E
# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
pshufd $0x4E,%xmm13,%xmm13

# qhasm:     xmm0 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm8=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

# qhasm:     xmm1 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm1=int6464#2
# asm 2: pxor  <xmm9=%xmm9,<xmm1=%xmm1
pxor  %xmm9,%xmm1

# qhasm:     xmm2 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm2=int6464#3
# asm 2: pxor  <xmm12=%xmm12,<xmm2=%xmm2
pxor  %xmm12,%xmm2

# qhasm:     xmm3 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm3=int6464#4
# asm 2: pxor  <xmm14=%xmm14,<xmm3=%xmm3
pxor  %xmm14,%xmm3

# qhasm:     xmm4 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm4=int6464#5
# asm 2: pxor  <xmm11=%xmm11,<xmm4=%xmm4
pxor  %xmm11,%xmm4

# qhasm:     xmm5 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm5=int6464#6
# asm 2: pxor  <xmm15=%xmm15,<xmm5=%xmm5
pxor  %xmm15,%xmm5

# qhasm:     xmm6 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm6=int6464#7
# asm 2: pxor  <xmm10=%xmm10,<xmm6=%xmm6
pxor  %xmm10,%xmm6

# qhasm:     xmm7 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm7=int6464#8
# asm 2: pxor  <xmm13=%xmm13,<xmm7=%xmm7
pxor  %xmm13,%xmm7

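# (Annotation, not generated output: next round. AddRoundKey with the
# following 128-byte key block at c+256..c+368, again followed by the
# SR byte shuffle for ShiftRows; the round state now lives in
# xmm0..xmm7.)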
# qhasm:     xmm0 ^= *(int128 *)(c + 256)
# asm 1: pxor 256(<c=int64#4),<xmm0=int6464#1
# asm 2: pxor 256(<c=%rcx),<xmm0=%xmm0
pxor 256(%rcx),%xmm0

# qhasm:     shuffle bytes of xmm0 by SR
# asm 1: pshufb SR,<xmm0=int6464#1
# asm 2: pshufb SR,<xmm0=%xmm0
pshufb SR,%xmm0

# qhasm:     xmm1 ^= *(int128 *)(c + 272)
# asm 1: pxor 272(<c=int64#4),<xmm1=int6464#2
# asm 2: pxor 272(<c=%rcx),<xmm1=%xmm1
pxor 272(%rcx),%xmm1

# qhasm:     shuffle bytes of xmm1 by SR
# asm 1: pshufb SR,<xmm1=int6464#2
# asm 2: pshufb SR,<xmm1=%xmm1
pshufb SR,%xmm1

# qhasm:     xmm2 ^= *(int128 *)(c + 288)
# asm 1: pxor 288(<c=int64#4),<xmm2=int6464#3
# asm 2: pxor 288(<c=%rcx),<xmm2=%xmm2
pxor 288(%rcx),%xmm2

# qhasm:     shuffle bytes of xmm2 by SR
# asm 1: pshufb SR,<xmm2=int6464#3
# asm 2: pshufb SR,<xmm2=%xmm2
pshufb SR,%xmm2

# qhasm:     xmm3 ^= *(int128 *)(c + 304)
# asm 1: pxor 304(<c=int64#4),<xmm3=int6464#4
# asm 2: pxor 304(<c=%rcx),<xmm3=%xmm3
pxor 304(%rcx),%xmm3

# qhasm:     shuffle bytes of xmm3 by SR
# asm 1: pshufb SR,<xmm3=int6464#4
# asm 2: pshufb SR,<xmm3=%xmm3
pshufb SR,%xmm3

# qhasm:     xmm4 ^= *(int128 *)(c + 320)
# asm 1: pxor 320(<c=int64#4),<xmm4=int6464#5
# asm 2: pxor 320(<c=%rcx),<xmm4=%xmm4
pxor 320(%rcx),%xmm4

# qhasm:     shuffle bytes of xmm4 by SR
# asm 1: pshufb SR,<xmm4=int6464#5
# asm 2: pshufb SR,<xmm4=%xmm4
pshufb SR,%xmm4

# qhasm:     xmm5 ^= *(int128 *)(c + 336)
# asm 1: pxor 336(<c=int64#4),<xmm5=int6464#6
# asm 2: pxor 336(<c=%rcx),<xmm5=%xmm5
pxor 336(%rcx),%xmm5

# qhasm:     shuffle bytes of xmm5 by SR
# asm 1: pshufb SR,<xmm5=int6464#6
# asm 2: pshufb SR,<xmm5=%xmm5
pshufb SR,%xmm5

# qhasm:     xmm6 ^= *(int128 *)(c + 352)
# asm 1: pxor 352(<c=int64#4),<xmm6=int6464#7
# asm 2: pxor 352(<c=%rcx),<xmm6=%xmm6
pxor 352(%rcx),%xmm6

# qhasm:     shuffle bytes of xmm6 by SR
# asm 1: pshufb SR,<xmm6=int6464#7
# asm 2: pshufb SR,<xmm6=%xmm6
pshufb SR,%xmm6

# qhasm:     xmm7 ^= *(int128 *)(c + 368)
# asm 1: pxor 368(<c=int64#4),<xmm7=int6464#8
# asm 2: pxor 368(<c=%rcx),<xmm7=%xmm7
pxor 368(%rcx),%xmm7

# qhasm:     shuffle bytes of xmm7 by SR
# asm 1: pshufb SR,<xmm7=int6464#8
# asm 2: pshufb SR,<xmm7=%xmm7
pshufb SR,%xmm7

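# (Annotation, not generated output: the S-box circuit repeats below for
# this round, now operating on xmm0..xmm7 with xmm8..xmm15 as
# temporaries; the generator appears to alternate the two register banks
# from round to round.)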
2970# qhasm:       xmm5 ^= xmm6
2971# asm 1: pxor  <xmm6=int6464#7,<xmm5=int6464#6
2972# asm 2: pxor  <xmm6=%xmm6,<xmm5=%xmm5
2973pxor  %xmm6,%xmm5
2974
2975# qhasm:       xmm2 ^= xmm1
2976# asm 1: pxor  <xmm1=int6464#2,<xmm2=int6464#3
2977# asm 2: pxor  <xmm1=%xmm1,<xmm2=%xmm2
2978pxor  %xmm1,%xmm2
2979
2980# qhasm:       xmm5 ^= xmm0
2981# asm 1: pxor  <xmm0=int6464#1,<xmm5=int6464#6
2982# asm 2: pxor  <xmm0=%xmm0,<xmm5=%xmm5
2983pxor  %xmm0,%xmm5
2984
2985# qhasm:       xmm6 ^= xmm2
2986# asm 1: pxor  <xmm2=int6464#3,<xmm6=int6464#7
2987# asm 2: pxor  <xmm2=%xmm2,<xmm6=%xmm6
2988pxor  %xmm2,%xmm6
2989
2990# qhasm:       xmm3 ^= xmm0
2991# asm 1: pxor  <xmm0=int6464#1,<xmm3=int6464#4
2992# asm 2: pxor  <xmm0=%xmm0,<xmm3=%xmm3
2993pxor  %xmm0,%xmm3
2994
2995# qhasm:       xmm6 ^= xmm3
2996# asm 1: pxor  <xmm3=int6464#4,<xmm6=int6464#7
2997# asm 2: pxor  <xmm3=%xmm3,<xmm6=%xmm6
2998pxor  %xmm3,%xmm6
2999
3000# qhasm:       xmm3 ^= xmm7
3001# asm 1: pxor  <xmm7=int6464#8,<xmm3=int6464#4
3002# asm 2: pxor  <xmm7=%xmm7,<xmm3=%xmm3
3003pxor  %xmm7,%xmm3
3004
3005# qhasm:       xmm3 ^= xmm4
3006# asm 1: pxor  <xmm4=int6464#5,<xmm3=int6464#4
3007# asm 2: pxor  <xmm4=%xmm4,<xmm3=%xmm3
3008pxor  %xmm4,%xmm3
3009
3010# qhasm:       xmm7 ^= xmm5
3011# asm 1: pxor  <xmm5=int6464#6,<xmm7=int6464#8
3012# asm 2: pxor  <xmm5=%xmm5,<xmm7=%xmm7
3013pxor  %xmm5,%xmm7
3014
3015# qhasm:       xmm3 ^= xmm1
3016# asm 1: pxor  <xmm1=int6464#2,<xmm3=int6464#4
3017# asm 2: pxor  <xmm1=%xmm1,<xmm3=%xmm3
3018pxor  %xmm1,%xmm3
3019
3020# qhasm:       xmm4 ^= xmm5
3021# asm 1: pxor  <xmm5=int6464#6,<xmm4=int6464#5
3022# asm 2: pxor  <xmm5=%xmm5,<xmm4=%xmm4
3023pxor  %xmm5,%xmm4
3024
3025# qhasm:       xmm2 ^= xmm7
3026# asm 1: pxor  <xmm7=int6464#8,<xmm2=int6464#3
3027# asm 2: pxor  <xmm7=%xmm7,<xmm2=%xmm2
3028pxor  %xmm7,%xmm2
3029
3030# qhasm:       xmm1 ^= xmm5
3031# asm 1: pxor  <xmm5=int6464#6,<xmm1=int6464#2
3032# asm 2: pxor  <xmm5=%xmm5,<xmm1=%xmm1
3033pxor  %xmm5,%xmm1
3034
3035# qhasm:       xmm11 = xmm7
3036# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
3037# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
3038movdqa %xmm7,%xmm8
3039
3040# qhasm:       xmm10 = xmm1
3041# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
3042# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
3043movdqa %xmm1,%xmm9
3044
3045# qhasm:       xmm9 = xmm5
3046# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
3047# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
3048movdqa %xmm5,%xmm10
3049
3050# qhasm:       xmm13 = xmm2
3051# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
3052# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
3053movdqa %xmm2,%xmm11
3054
3055# qhasm:       xmm12 = xmm6
3056# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
3057# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
3058movdqa %xmm6,%xmm12
3059
3060# qhasm:       xmm11 ^= xmm4
3061# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#9
3062# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm8
3063pxor  %xmm4,%xmm8
3064
3065# qhasm:       xmm10 ^= xmm2
3066# asm 1: pxor  <xmm2=int6464#3,<xmm10=int6464#10
3067# asm 2: pxor  <xmm2=%xmm2,<xmm10=%xmm9
3068pxor  %xmm2,%xmm9
3069
3070# qhasm:       xmm9 ^= xmm3
3071# asm 1: pxor  <xmm3=int6464#4,<xmm9=int6464#11
3072# asm 2: pxor  <xmm3=%xmm3,<xmm9=%xmm10
3073pxor  %xmm3,%xmm10
3074
3075# qhasm:       xmm13 ^= xmm4
3076# asm 1: pxor  <xmm4=int6464#5,<xmm13=int6464#12
3077# asm 2: pxor  <xmm4=%xmm4,<xmm13=%xmm11
3078pxor  %xmm4,%xmm11
3079
3080# qhasm:       xmm12 ^= xmm0
3081# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#13
3082# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm12
3083pxor  %xmm0,%xmm12
3084
3085# qhasm:       xmm14 = xmm11
3086# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
3087# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
3088movdqa %xmm8,%xmm13
3089
3090# qhasm:       xmm8 = xmm10
3091# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
3092# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
3093movdqa %xmm9,%xmm14
3094
3095# qhasm:       xmm15 = xmm11
3096# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
3097# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
3098movdqa %xmm8,%xmm15
3099
3100# qhasm:       xmm10 |= xmm9
3101# asm 1: por   <xmm9=int6464#11,<xmm10=int6464#10
3102# asm 2: por   <xmm9=%xmm10,<xmm10=%xmm9
3103por   %xmm10,%xmm9
3104
3105# qhasm:       xmm11 |= xmm12
3106# asm 1: por   <xmm12=int6464#13,<xmm11=int6464#9
3107# asm 2: por   <xmm12=%xmm12,<xmm11=%xmm8
3108por   %xmm12,%xmm8
3109
3110# qhasm:       xmm15 ^= xmm8
3111# asm 1: pxor  <xmm8=int6464#15,<xmm15=int6464#16
3112# asm 2: pxor  <xmm8=%xmm14,<xmm15=%xmm15
3113pxor  %xmm14,%xmm15
3114
3115# qhasm:       xmm14 &= xmm12
3116# asm 1: pand  <xmm12=int6464#13,<xmm14=int6464#14
3117# asm 2: pand  <xmm12=%xmm12,<xmm14=%xmm13
3118pand  %xmm12,%xmm13
3119
3120# qhasm:       xmm8 &= xmm9
3121# asm 1: pand  <xmm9=int6464#11,<xmm8=int6464#15
3122# asm 2: pand  <xmm9=%xmm10,<xmm8=%xmm14
3123pand  %xmm10,%xmm14
3124
3125# qhasm:       xmm12 ^= xmm9
3126# asm 1: pxor  <xmm9=int6464#11,<xmm12=int6464#13
3127# asm 2: pxor  <xmm9=%xmm10,<xmm12=%xmm12
3128pxor  %xmm10,%xmm12
3129
3130# qhasm:       xmm15 &= xmm12
3131# asm 1: pand  <xmm12=int6464#13,<xmm15=int6464#16
3132# asm 2: pand  <xmm12=%xmm12,<xmm15=%xmm15
3133pand  %xmm12,%xmm15
3134
3135# qhasm:       xmm12 = xmm3
3136# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
3137# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
3138movdqa %xmm3,%xmm10
3139
3140# qhasm:       xmm12 ^= xmm0
3141# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#11
3142# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm10
3143pxor  %xmm0,%xmm10
3144
3145# qhasm:       xmm13 &= xmm12
3146# asm 1: pand  <xmm12=int6464#11,<xmm13=int6464#12
3147# asm 2: pand  <xmm12=%xmm10,<xmm13=%xmm11
3148pand  %xmm10,%xmm11
3149
3150# qhasm:       xmm11 ^= xmm13
3151# asm 1: pxor  <xmm13=int6464#12,<xmm11=int6464#9
3152# asm 2: pxor  <xmm13=%xmm11,<xmm11=%xmm8
3153pxor  %xmm11,%xmm8
3154
3155# qhasm:       xmm10 ^= xmm13
3156# asm 1: pxor  <xmm13=int6464#12,<xmm10=int6464#10
3157# asm 2: pxor  <xmm13=%xmm11,<xmm10=%xmm9
3158pxor  %xmm11,%xmm9
3159
3160# qhasm:       xmm13 = xmm7
3161# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
3162# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
3163movdqa %xmm7,%xmm10
3164
3165# qhasm:       xmm13 ^= xmm1
3166# asm 1: pxor  <xmm1=int6464#2,<xmm13=int6464#11
3167# asm 2: pxor  <xmm1=%xmm1,<xmm13=%xmm10
3168pxor  %xmm1,%xmm10
3169
3170# qhasm:       xmm12 = xmm5
3171# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
3172# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
3173movdqa %xmm5,%xmm11
3174
3175# qhasm:       xmm9 = xmm13
3176# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
3177# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
3178movdqa %xmm10,%xmm12
3179
3180# qhasm:       xmm12 ^= xmm6
3181# asm 1: pxor  <xmm6=int6464#7,<xmm12=int6464#12
3182# asm 2: pxor  <xmm6=%xmm6,<xmm12=%xmm11
3183pxor  %xmm6,%xmm11
3184
3185# qhasm:       xmm9 |= xmm12
3186# asm 1: por   <xmm12=int6464#12,<xmm9=int6464#13
3187# asm 2: por   <xmm12=%xmm11,<xmm9=%xmm12
3188por   %xmm11,%xmm12
3189
3190# qhasm:       xmm13 &= xmm12
3191# asm 1: pand  <xmm12=int6464#12,<xmm13=int6464#11
3192# asm 2: pand  <xmm12=%xmm11,<xmm13=%xmm10
3193pand  %xmm11,%xmm10
3194
3195# qhasm:       xmm8 ^= xmm13
3196# asm 1: pxor  <xmm13=int6464#11,<xmm8=int6464#15
3197# asm 2: pxor  <xmm13=%xmm10,<xmm8=%xmm14
3198pxor  %xmm10,%xmm14
3199
3200# qhasm:       xmm11 ^= xmm15
3201# asm 1: pxor  <xmm15=int6464#16,<xmm11=int6464#9
3202# asm 2: pxor  <xmm15=%xmm15,<xmm11=%xmm8
3203pxor  %xmm15,%xmm8
3204
3205# qhasm:       xmm10 ^= xmm14
3206# asm 1: pxor  <xmm14=int6464#14,<xmm10=int6464#10
3207# asm 2: pxor  <xmm14=%xmm13,<xmm10=%xmm9
3208pxor  %xmm13,%xmm9
3209
3210# qhasm:       xmm9 ^= xmm15
3211# asm 1: pxor  <xmm15=int6464#16,<xmm9=int6464#13
3212# asm 2: pxor  <xmm15=%xmm15,<xmm9=%xmm12
3213pxor  %xmm15,%xmm12
3214
3215# qhasm:       xmm8 ^= xmm14
3216# asm 1: pxor  <xmm14=int6464#14,<xmm8=int6464#15
3217# asm 2: pxor  <xmm14=%xmm13,<xmm8=%xmm14
3218pxor  %xmm13,%xmm14
3219
3220# qhasm:       xmm9 ^= xmm14
3221# asm 1: pxor  <xmm14=int6464#14,<xmm9=int6464#13
3222# asm 2: pxor  <xmm14=%xmm13,<xmm9=%xmm12
3223pxor  %xmm13,%xmm12
3224
3225# qhasm:       xmm12 = xmm2
3226# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
3227# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
3228movdqa %xmm2,%xmm10
3229
3230# qhasm:       xmm13 = xmm4
3231# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
3232# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
3233movdqa %xmm4,%xmm11
3234
3235# qhasm:       xmm14 = xmm1
3236# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
3237# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
3238movdqa %xmm1,%xmm13
3239
3240# qhasm:       xmm15 = xmm7
3241# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
3242# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
3243movdqa %xmm7,%xmm15
3244
3245# qhasm:       xmm12 &= xmm3
3246# asm 1: pand  <xmm3=int6464#4,<xmm12=int6464#11
3247# asm 2: pand  <xmm3=%xmm3,<xmm12=%xmm10
3248pand  %xmm3,%xmm10
3249
3250# qhasm:       xmm13 &= xmm0
3251# asm 1: pand  <xmm0=int6464#1,<xmm13=int6464#12
3252# asm 2: pand  <xmm0=%xmm0,<xmm13=%xmm11
3253pand  %xmm0,%xmm11
3254
3255# qhasm:       xmm14 &= xmm5
3256# asm 1: pand  <xmm5=int6464#6,<xmm14=int6464#14
3257# asm 2: pand  <xmm5=%xmm5,<xmm14=%xmm13
3258pand  %xmm5,%xmm13
3259
3260# qhasm:       xmm15 |= xmm6
3261# asm 1: por   <xmm6=int6464#7,<xmm15=int6464#16
3262# asm 2: por   <xmm6=%xmm6,<xmm15=%xmm15
3263por   %xmm6,%xmm15
3264
3265# qhasm:       xmm11 ^= xmm12
3266# asm 1: pxor  <xmm12=int6464#11,<xmm11=int6464#9
3267# asm 2: pxor  <xmm12=%xmm10,<xmm11=%xmm8
3268pxor  %xmm10,%xmm8
3269
3270# qhasm:       xmm10 ^= xmm13
3271# asm 1: pxor  <xmm13=int6464#12,<xmm10=int6464#10
3272# asm 2: pxor  <xmm13=%xmm11,<xmm10=%xmm9
3273pxor  %xmm11,%xmm9
3274
3275# qhasm:       xmm9 ^= xmm14
3276# asm 1: pxor  <xmm14=int6464#14,<xmm9=int6464#13
3277# asm 2: pxor  <xmm14=%xmm13,<xmm9=%xmm12
3278pxor  %xmm13,%xmm12
3279
3280# qhasm:       xmm8 ^= xmm15
3281# asm 1: pxor  <xmm15=int6464#16,<xmm8=int6464#15
3282# asm 2: pxor  <xmm15=%xmm15,<xmm8=%xmm14
3283pxor  %xmm15,%xmm14
3284
3285# qhasm:       xmm12 = xmm11
3286# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
3287# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
3288movdqa %xmm8,%xmm10
3289
3290# qhasm:       xmm12 ^= xmm10
3291# asm 1: pxor  <xmm10=int6464#10,<xmm12=int6464#11
3292# asm 2: pxor  <xmm10=%xmm9,<xmm12=%xmm10
3293pxor  %xmm9,%xmm10
3294
3295# qhasm:       xmm11 &= xmm9
3296# asm 1: pand  <xmm9=int6464#13,<xmm11=int6464#9
3297# asm 2: pand  <xmm9=%xmm12,<xmm11=%xmm8
3298pand  %xmm12,%xmm8
3299
3300# qhasm:       xmm14 = xmm8
3301# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
3302# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
3303movdqa %xmm14,%xmm11
3304
3305# qhasm:       xmm14 ^= xmm11
3306# asm 1: pxor  <xmm11=int6464#9,<xmm14=int6464#12
3307# asm 2: pxor  <xmm11=%xmm8,<xmm14=%xmm11
3308pxor  %xmm8,%xmm11
3309
3310# qhasm:       xmm15 = xmm12
3311# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
3312# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
3313movdqa %xmm10,%xmm13
3314
3315# qhasm:       xmm15 &= xmm14
3316# asm 1: pand  <xmm14=int6464#12,<xmm15=int6464#14
3317# asm 2: pand  <xmm14=%xmm11,<xmm15=%xmm13
3318pand  %xmm11,%xmm13
3319
3320# qhasm:       xmm15 ^= xmm10
3321# asm 1: pxor  <xmm10=int6464#10,<xmm15=int6464#14
3322# asm 2: pxor  <xmm10=%xmm9,<xmm15=%xmm13
3323pxor  %xmm9,%xmm13
3324
3325# qhasm:       xmm13 = xmm9
3326# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
3327# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
3328movdqa %xmm12,%xmm15
3329
3330# qhasm:       xmm13 ^= xmm8
3331# asm 1: pxor  <xmm8=int6464#15,<xmm13=int6464#16
3332# asm 2: pxor  <xmm8=%xmm14,<xmm13=%xmm15
3333pxor  %xmm14,%xmm15
3334
3335# qhasm:       xmm11 ^= xmm10
3336# asm 1: pxor  <xmm10=int6464#10,<xmm11=int6464#9
3337# asm 2: pxor  <xmm10=%xmm9,<xmm11=%xmm8
3338pxor  %xmm9,%xmm8
3339
3340# qhasm:       xmm13 &= xmm11
3341# asm 1: pand  <xmm11=int6464#9,<xmm13=int6464#16
3342# asm 2: pand  <xmm11=%xmm8,<xmm13=%xmm15
3343pand  %xmm8,%xmm15
3344
3345# qhasm:       xmm13 ^= xmm8
3346# asm 1: pxor  <xmm8=int6464#15,<xmm13=int6464#16
3347# asm 2: pxor  <xmm8=%xmm14,<xmm13=%xmm15
3348pxor  %xmm14,%xmm15
3349
3350# qhasm:       xmm9 ^= xmm13
3351# asm 1: pxor  <xmm13=int6464#16,<xmm9=int6464#13
3352# asm 2: pxor  <xmm13=%xmm15,<xmm9=%xmm12
3353pxor  %xmm15,%xmm12
3354
3355# qhasm:       xmm10 = xmm14
3356# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
3357# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
3358movdqa %xmm11,%xmm8
3359
3360# qhasm:       xmm10 ^= xmm13
3361# asm 1: pxor  <xmm13=int6464#16,<xmm10=int6464#9
3362# asm 2: pxor  <xmm13=%xmm15,<xmm10=%xmm8
3363pxor  %xmm15,%xmm8
3364
3365# qhasm:       xmm10 &= xmm8
3366# asm 1: pand  <xmm8=int6464#15,<xmm10=int6464#9
3367# asm 2: pand  <xmm8=%xmm14,<xmm10=%xmm8
3368pand  %xmm14,%xmm8
3369
3370# qhasm:       xmm9 ^= xmm10
3371# asm 1: pxor  <xmm10=int6464#9,<xmm9=int6464#13
3372# asm 2: pxor  <xmm10=%xmm8,<xmm9=%xmm12
3373pxor  %xmm8,%xmm12
3374
3375# qhasm:       xmm14 ^= xmm10
3376# asm 1: pxor  <xmm10=int6464#9,<xmm14=int6464#12
3377# asm 2: pxor  <xmm10=%xmm8,<xmm14=%xmm11
3378pxor  %xmm8,%xmm11
3379
3380# qhasm:       xmm14 &= xmm15
3381# asm 1: pand  <xmm15=int6464#14,<xmm14=int6464#12
3382# asm 2: pand  <xmm15=%xmm13,<xmm14=%xmm11
3383pand  %xmm13,%xmm11
3384
3385# qhasm:       xmm14 ^= xmm12
3386# asm 1: pxor  <xmm12=int6464#11,<xmm14=int6464#12
3387# asm 2: pxor  <xmm12=%xmm10,<xmm14=%xmm11
3388pxor  %xmm10,%xmm11
3389
3390# qhasm:         xmm12 = xmm6
3391# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
3392# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
3393movdqa %xmm6,%xmm8
3394
3395# qhasm:         xmm8 = xmm5
3396# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
3397# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
3398movdqa %xmm5,%xmm9
3399
3400# qhasm:           xmm10 = xmm15
3401# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
3402# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
3403movdqa %xmm13,%xmm10
3404
3405# qhasm:           xmm10 ^= xmm14
3406# asm 1: pxor  <xmm14=int6464#12,<xmm10=int6464#11
3407# asm 2: pxor  <xmm14=%xmm11,<xmm10=%xmm10
3408pxor  %xmm11,%xmm10
3409
3410# qhasm:           xmm10 &= xmm6
3411# asm 1: pand  <xmm6=int6464#7,<xmm10=int6464#11
3412# asm 2: pand  <xmm6=%xmm6,<xmm10=%xmm10
3413pand  %xmm6,%xmm10
3414
3415# qhasm:           xmm6 ^= xmm5
3416# asm 1: pxor  <xmm5=int6464#6,<xmm6=int6464#7
3417# asm 2: pxor  <xmm5=%xmm5,<xmm6=%xmm6
3418pxor  %xmm5,%xmm6
3419
3420# qhasm:           xmm6 &= xmm14
3421# asm 1: pand  <xmm14=int6464#12,<xmm6=int6464#7
3422# asm 2: pand  <xmm14=%xmm11,<xmm6=%xmm6
3423pand  %xmm11,%xmm6
3424
3425# qhasm:           xmm5 &= xmm15
3426# asm 1: pand  <xmm15=int6464#14,<xmm5=int6464#6
3427# asm 2: pand  <xmm15=%xmm13,<xmm5=%xmm5
3428pand  %xmm13,%xmm5
3429
3430# qhasm:           xmm6 ^= xmm5
3431# asm 1: pxor  <xmm5=int6464#6,<xmm6=int6464#7
3432# asm 2: pxor  <xmm5=%xmm5,<xmm6=%xmm6
3433pxor  %xmm5,%xmm6
3434
3435# qhasm:           xmm5 ^= xmm10
3436# asm 1: pxor  <xmm10=int6464#11,<xmm5=int6464#6
3437# asm 2: pxor  <xmm10=%xmm10,<xmm5=%xmm5
3438pxor  %xmm10,%xmm5
3439
3440# qhasm:         xmm12 ^= xmm0
3441# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#9
3442# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm8
3443pxor  %xmm0,%xmm8
3444
3445# qhasm:         xmm8 ^= xmm3
3446# asm 1: pxor  <xmm3=int6464#4,<xmm8=int6464#10
3447# asm 2: pxor  <xmm3=%xmm3,<xmm8=%xmm9
3448pxor  %xmm3,%xmm9
3449
3450# qhasm:         xmm15 ^= xmm13
3451# asm 1: pxor  <xmm13=int6464#16,<xmm15=int6464#14
3452# asm 2: pxor  <xmm13=%xmm15,<xmm15=%xmm13
3453pxor  %xmm15,%xmm13
3454
3455# qhasm:         xmm14 ^= xmm9
3456# asm 1: pxor  <xmm9=int6464#13,<xmm14=int6464#12
3457# asm 2: pxor  <xmm9=%xmm12,<xmm14=%xmm11
3458pxor  %xmm12,%xmm11
3459
3460# qhasm:           xmm11 = xmm15
3461# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3462# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3463movdqa %xmm13,%xmm10
3464
3465# qhasm:           xmm11 ^= xmm14
3466# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
3467# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
3468pxor  %xmm11,%xmm10
3469
3470# qhasm:           xmm11 &= xmm12
3471# asm 1: pand  <xmm12=int6464#9,<xmm11=int6464#11
3472# asm 2: pand  <xmm12=%xmm8,<xmm11=%xmm10
3473pand  %xmm8,%xmm10
3474
3475# qhasm:           xmm12 ^= xmm8
3476# asm 1: pxor  <xmm8=int6464#10,<xmm12=int6464#9
3477# asm 2: pxor  <xmm8=%xmm9,<xmm12=%xmm8
3478pxor  %xmm9,%xmm8
3479
3480# qhasm:           xmm12 &= xmm14
3481# asm 1: pand  <xmm14=int6464#12,<xmm12=int6464#9
3482# asm 2: pand  <xmm14=%xmm11,<xmm12=%xmm8
3483pand  %xmm11,%xmm8
3484
3485# qhasm:           xmm8 &= xmm15
3486# asm 1: pand  <xmm15=int6464#14,<xmm8=int6464#10
3487# asm 2: pand  <xmm15=%xmm13,<xmm8=%xmm9
3488pand  %xmm13,%xmm9
3489
3490# qhasm:           xmm8 ^= xmm12
3491# asm 1: pxor  <xmm12=int6464#9,<xmm8=int6464#10
3492# asm 2: pxor  <xmm12=%xmm8,<xmm8=%xmm9
3493pxor  %xmm8,%xmm9
3494
3495# qhasm:           xmm12 ^= xmm11
3496# asm 1: pxor  <xmm11=int6464#11,<xmm12=int6464#9
3497# asm 2: pxor  <xmm11=%xmm10,<xmm12=%xmm8
3498pxor  %xmm10,%xmm8
3499
3500# qhasm:           xmm10 = xmm13
3501# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
3502# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
3503movdqa %xmm15,%xmm10
3504
3505# qhasm:           xmm10 ^= xmm9
3506# asm 1: pxor  <xmm9=int6464#13,<xmm10=int6464#11
3507# asm 2: pxor  <xmm9=%xmm12,<xmm10=%xmm10
3508pxor  %xmm12,%xmm10
3509
3510# qhasm:           xmm10 &= xmm0
3511# asm 1: pand  <xmm0=int6464#1,<xmm10=int6464#11
3512# asm 2: pand  <xmm0=%xmm0,<xmm10=%xmm10
3513pand  %xmm0,%xmm10
3514
3515# qhasm:           xmm0 ^= xmm3
3516# asm 1: pxor  <xmm3=int6464#4,<xmm0=int6464#1
3517# asm 2: pxor  <xmm3=%xmm3,<xmm0=%xmm0
3518pxor  %xmm3,%xmm0
3519
3520# qhasm:           xmm0 &= xmm9
3521# asm 1: pand  <xmm9=int6464#13,<xmm0=int6464#1
3522# asm 2: pand  <xmm9=%xmm12,<xmm0=%xmm0
3523pand  %xmm12,%xmm0
3524
3525# qhasm:           xmm3 &= xmm13
3526# asm 1: pand  <xmm13=int6464#16,<xmm3=int6464#4
3527# asm 2: pand  <xmm13=%xmm15,<xmm3=%xmm3
3528pand  %xmm15,%xmm3
3529
3530# qhasm:           xmm0 ^= xmm3
3531# asm 1: pxor  <xmm3=int6464#4,<xmm0=int6464#1
3532# asm 2: pxor  <xmm3=%xmm3,<xmm0=%xmm0
3533pxor  %xmm3,%xmm0
3534
3535# qhasm:           xmm3 ^= xmm10
3536# asm 1: pxor  <xmm10=int6464#11,<xmm3=int6464#4
3537# asm 2: pxor  <xmm10=%xmm10,<xmm3=%xmm3
3538pxor  %xmm10,%xmm3
3539
3540# qhasm:         xmm6 ^= xmm12
3541# asm 1: pxor  <xmm12=int6464#9,<xmm6=int6464#7
3542# asm 2: pxor  <xmm12=%xmm8,<xmm6=%xmm6
3543pxor  %xmm8,%xmm6
3544
3545# qhasm:         xmm0 ^= xmm12
3546# asm 1: pxor  <xmm12=int6464#9,<xmm0=int6464#1
3547# asm 2: pxor  <xmm12=%xmm8,<xmm0=%xmm0
3548pxor  %xmm8,%xmm0
3549
3550# qhasm:         xmm5 ^= xmm8
3551# asm 1: pxor  <xmm8=int6464#10,<xmm5=int6464#6
3552# asm 2: pxor  <xmm8=%xmm9,<xmm5=%xmm5
3553pxor  %xmm9,%xmm5
3554
3555# qhasm:         xmm3 ^= xmm8
3556# asm 1: pxor  <xmm8=int6464#10,<xmm3=int6464#4
3557# asm 2: pxor  <xmm8=%xmm9,<xmm3=%xmm3
3558pxor  %xmm9,%xmm3
3559
3560# qhasm:         xmm12 = xmm7
3561# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
3562# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
3563movdqa %xmm7,%xmm8
3564
3565# qhasm:         xmm8 = xmm1
3566# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
3567# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
3568movdqa %xmm1,%xmm9
3569
3570# qhasm:         xmm12 ^= xmm4
3571# asm 1: pxor  <xmm4=int6464#5,<xmm12=int6464#9
3572# asm 2: pxor  <xmm4=%xmm4,<xmm12=%xmm8
3573pxor  %xmm4,%xmm8
3574
3575# qhasm:         xmm8 ^= xmm2
3576# asm 1: pxor  <xmm2=int6464#3,<xmm8=int6464#10
3577# asm 2: pxor  <xmm2=%xmm2,<xmm8=%xmm9
3578pxor  %xmm2,%xmm9
3579
3580# qhasm:           xmm11 = xmm15
3581# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3582# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3583movdqa %xmm13,%xmm10
3584
3585# qhasm:           xmm11 ^= xmm14
3586# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
3587# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
3588pxor  %xmm11,%xmm10
3589
3590# qhasm:           xmm11 &= xmm12
3591# asm 1: pand  <xmm12=int6464#9,<xmm11=int6464#11
3592# asm 2: pand  <xmm12=%xmm8,<xmm11=%xmm10
3593pand  %xmm8,%xmm10
3594
3595# qhasm:           xmm12 ^= xmm8
3596# asm 1: pxor  <xmm8=int6464#10,<xmm12=int6464#9
3597# asm 2: pxor  <xmm8=%xmm9,<xmm12=%xmm8
3598pxor  %xmm9,%xmm8
3599
3600# qhasm:           xmm12 &= xmm14
3601# asm 1: pand  <xmm14=int6464#12,<xmm12=int6464#9
3602# asm 2: pand  <xmm14=%xmm11,<xmm12=%xmm8
3603pand  %xmm11,%xmm8
3604
3605# qhasm:           xmm8 &= xmm15
3606# asm 1: pand  <xmm15=int6464#14,<xmm8=int6464#10
3607# asm 2: pand  <xmm15=%xmm13,<xmm8=%xmm9
3608pand  %xmm13,%xmm9
3609
3610# qhasm:           xmm8 ^= xmm12
3611# asm 1: pxor  <xmm12=int6464#9,<xmm8=int6464#10
3612# asm 2: pxor  <xmm12=%xmm8,<xmm8=%xmm9
3613pxor  %xmm8,%xmm9
3614
3615# qhasm:           xmm12 ^= xmm11
3616# asm 1: pxor  <xmm11=int6464#11,<xmm12=int6464#9
3617# asm 2: pxor  <xmm11=%xmm10,<xmm12=%xmm8
3618pxor  %xmm10,%xmm8
3619
3620# qhasm:           xmm10 = xmm13
3621# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
3622# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
3623movdqa %xmm15,%xmm10
3624
3625# qhasm:           xmm10 ^= xmm9
3626# asm 1: pxor  <xmm9=int6464#13,<xmm10=int6464#11
3627# asm 2: pxor  <xmm9=%xmm12,<xmm10=%xmm10
3628pxor  %xmm12,%xmm10
3629
3630# qhasm:           xmm10 &= xmm4
3631# asm 1: pand  <xmm4=int6464#5,<xmm10=int6464#11
3632# asm 2: pand  <xmm4=%xmm4,<xmm10=%xmm10
3633pand  %xmm4,%xmm10
3634
3635# qhasm:           xmm4 ^= xmm2
3636# asm 1: pxor  <xmm2=int6464#3,<xmm4=int6464#5
3637# asm 2: pxor  <xmm2=%xmm2,<xmm4=%xmm4
3638pxor  %xmm2,%xmm4
3639
3640# qhasm:           xmm4 &= xmm9
3641# asm 1: pand  <xmm9=int6464#13,<xmm4=int6464#5
3642# asm 2: pand  <xmm9=%xmm12,<xmm4=%xmm4
3643pand  %xmm12,%xmm4
3644
3645# qhasm:           xmm2 &= xmm13
3646# asm 1: pand  <xmm13=int6464#16,<xmm2=int6464#3
3647# asm 2: pand  <xmm13=%xmm15,<xmm2=%xmm2
3648pand  %xmm15,%xmm2
3649
3650# qhasm:           xmm4 ^= xmm2
3651# asm 1: pxor  <xmm2=int6464#3,<xmm4=int6464#5
3652# asm 2: pxor  <xmm2=%xmm2,<xmm4=%xmm4
3653pxor  %xmm2,%xmm4
3654
3655# qhasm:           xmm2 ^= xmm10
3656# asm 1: pxor  <xmm10=int6464#11,<xmm2=int6464#3
3657# asm 2: pxor  <xmm10=%xmm10,<xmm2=%xmm2
3658pxor  %xmm10,%xmm2
3659
3660# qhasm:         xmm15 ^= xmm13
3661# asm 1: pxor  <xmm13=int6464#16,<xmm15=int6464#14
3662# asm 2: pxor  <xmm13=%xmm15,<xmm15=%xmm13
3663pxor  %xmm15,%xmm13
3664
3665# qhasm:         xmm14 ^= xmm9
3666# asm 1: pxor  <xmm9=int6464#13,<xmm14=int6464#12
3667# asm 2: pxor  <xmm9=%xmm12,<xmm14=%xmm11
3668pxor  %xmm12,%xmm11
3669
3670# qhasm:           xmm11 = xmm15
3671# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3672# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3673movdqa %xmm13,%xmm10
3674
3675# qhasm:           xmm11 ^= xmm14
3676# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
3677# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
3678pxor  %xmm11,%xmm10
3679
3680# qhasm:           xmm11 &= xmm7
3681# asm 1: pand  <xmm7=int6464#8,<xmm11=int6464#11
3682# asm 2: pand  <xmm7=%xmm7,<xmm11=%xmm10
3683pand  %xmm7,%xmm10
3684
3685# qhasm:           xmm7 ^= xmm1
3686# asm 1: pxor  <xmm1=int6464#2,<xmm7=int6464#8
3687# asm 2: pxor  <xmm1=%xmm1,<xmm7=%xmm7
3688pxor  %xmm1,%xmm7
3689
3690# qhasm:           xmm7 &= xmm14
3691# asm 1: pand  <xmm14=int6464#12,<xmm7=int6464#8
3692# asm 2: pand  <xmm14=%xmm11,<xmm7=%xmm7
3693pand  %xmm11,%xmm7
3694
3695# qhasm:           xmm1 &= xmm15
3696# asm 1: pand  <xmm15=int6464#14,<xmm1=int6464#2
3697# asm 2: pand  <xmm15=%xmm13,<xmm1=%xmm1
3698pand  %xmm13,%xmm1
3699
3700# qhasm:           xmm7 ^= xmm1
3701# asm 1: pxor  <xmm1=int6464#2,<xmm7=int6464#8
3702# asm 2: pxor  <xmm1=%xmm1,<xmm7=%xmm7
3703pxor  %xmm1,%xmm7
3704
3705# qhasm:           xmm1 ^= xmm11
3706# asm 1: pxor  <xmm11=int6464#11,<xmm1=int6464#2
3707# asm 2: pxor  <xmm11=%xmm10,<xmm1=%xmm1
3708pxor  %xmm10,%xmm1
3709
3710# qhasm:         xmm7 ^= xmm12
3711# asm 1: pxor  <xmm12=int6464#9,<xmm7=int6464#8
3712# asm 2: pxor  <xmm12=%xmm8,<xmm7=%xmm7
3713pxor  %xmm8,%xmm7
3714
3715# qhasm:         xmm4 ^= xmm12
3716# asm 1: pxor  <xmm12=int6464#9,<xmm4=int6464#5
3717# asm 2: pxor  <xmm12=%xmm8,<xmm4=%xmm4
3718pxor  %xmm8,%xmm4
3719
3720# qhasm:         xmm1 ^= xmm8
3721# asm 1: pxor  <xmm8=int6464#10,<xmm1=int6464#2
3722# asm 2: pxor  <xmm8=%xmm9,<xmm1=%xmm1
3723pxor  %xmm9,%xmm1
3724
3725# qhasm:         xmm2 ^= xmm8
3726# asm 1: pxor  <xmm8=int6464#10,<xmm2=int6464#3
3727# asm 2: pxor  <xmm8=%xmm9,<xmm2=%xmm2
3728pxor  %xmm9,%xmm2
3729
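# The remainder of this SubBytes is XOR-only: the bottom linear layer of
# the bitsliced S-box, folding the shared nonlinear products into the
# eight output bit slices (one S-box output bit per xmm register).
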
3730# qhasm:       xmm7 ^= xmm0
3731# asm 1: pxor  <xmm0=int6464#1,<xmm7=int6464#8
3732# asm 2: pxor  <xmm0=%xmm0,<xmm7=%xmm7
3733pxor  %xmm0,%xmm7
3734
3735# qhasm:       xmm1 ^= xmm6
3736# asm 1: pxor  <xmm6=int6464#7,<xmm1=int6464#2
3737# asm 2: pxor  <xmm6=%xmm6,<xmm1=%xmm1
3738pxor  %xmm6,%xmm1
3739
3740# qhasm:       xmm4 ^= xmm7
3741# asm 1: pxor  <xmm7=int6464#8,<xmm4=int6464#5
3742# asm 2: pxor  <xmm7=%xmm7,<xmm4=%xmm4
3743pxor  %xmm7,%xmm4
3744
3745# qhasm:       xmm6 ^= xmm0
3746# asm 1: pxor  <xmm0=int6464#1,<xmm6=int6464#7
3747# asm 2: pxor  <xmm0=%xmm0,<xmm6=%xmm6
3748pxor  %xmm0,%xmm6
3749
3750# qhasm:       xmm0 ^= xmm1
3751# asm 1: pxor  <xmm1=int6464#2,<xmm0=int6464#1
3752# asm 2: pxor  <xmm1=%xmm1,<xmm0=%xmm0
3753pxor  %xmm1,%xmm0
3754
3755# qhasm:       xmm1 ^= xmm5
3756# asm 1: pxor  <xmm5=int6464#6,<xmm1=int6464#2
3757# asm 2: pxor  <xmm5=%xmm5,<xmm1=%xmm1
3758pxor  %xmm5,%xmm1
3759
3760# qhasm:       xmm5 ^= xmm2
3761# asm 1: pxor  <xmm2=int6464#3,<xmm5=int6464#6
3762# asm 2: pxor  <xmm2=%xmm2,<xmm5=%xmm5
3763pxor  %xmm2,%xmm5
3764
3765# qhasm:       xmm4 ^= xmm5
3766# asm 1: pxor  <xmm5=int6464#6,<xmm4=int6464#5
3767# asm 2: pxor  <xmm5=%xmm5,<xmm4=%xmm4
3768pxor  %xmm5,%xmm4
3769
3770# qhasm:       xmm2 ^= xmm3
3771# asm 1: pxor  <xmm3=int6464#4,<xmm2=int6464#3
3772# asm 2: pxor  <xmm3=%xmm3,<xmm2=%xmm2
3773pxor  %xmm3,%xmm2
3774
3775# qhasm:       xmm3 ^= xmm5
3776# asm 1: pxor  <xmm5=int6464#6,<xmm3=int6464#4
3777# asm 2: pxor  <xmm5=%xmm5,<xmm3=%xmm3
3778pxor  %xmm5,%xmm3
3779
3780# qhasm:       xmm6 ^= xmm3
3781# asm 1: pxor  <xmm3=int6464#4,<xmm6=int6464#7
3782# asm 2: pxor  <xmm3=%xmm3,<xmm6=%xmm6
3783pxor  %xmm3,%xmm6
3784
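# MixColumns, bitsliced. pshufd $0x93 rotates the four 32-bit words of a
# slice by one position and pshufd $0x4E swaps its 64-bit halves (a
# rotation by two); in this packing a one-word rotation moves every byte
# of the AES state by one row. Each output slice in xmm8..xmm15 is built
# as rot(x_i), XORed with cross-slice terms (the multiply-by-02
# contribution), XORed with rot^2(x_i ^ rot(x_i)).
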
3785# qhasm:     xmm8 = shuffle dwords of xmm0 by 0x93
3786# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
3787# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
3788pshufd $0x93,%xmm0,%xmm8
3789
3790# qhasm:     xmm9 = shuffle dwords of xmm1 by 0x93
3791# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
3792# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
3793pshufd $0x93,%xmm1,%xmm9
3794
3795# qhasm:     xmm10 = shuffle dwords of xmm4 by 0x93
3796# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
3797# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
3798pshufd $0x93,%xmm4,%xmm10
3799
3800# qhasm:     xmm11 = shuffle dwords of xmm6 by 0x93
3801# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
3802# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
3803pshufd $0x93,%xmm6,%xmm11
3804
3805# qhasm:     xmm12 = shuffle dwords of xmm3 by 0x93
3806# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
3807# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
3808pshufd $0x93,%xmm3,%xmm12
3809
3810# qhasm:     xmm13 = shuffle dwords of xmm7 by 0x93
3811# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
3812# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
3813pshufd $0x93,%xmm7,%xmm13
3814
3815# qhasm:     xmm14 = shuffle dwords of xmm2 by 0x93
3816# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
3817# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
3818pshufd $0x93,%xmm2,%xmm14
3819
3820# qhasm:     xmm15 = shuffle dwords of xmm5 by 0x93
3821# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
3822# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
3823pshufd $0x93,%xmm5,%xmm15
3824
3825# qhasm:     xmm0 ^= xmm8
3826# asm 1: pxor  <xmm8=int6464#9,<xmm0=int6464#1
3827# asm 2: pxor  <xmm8=%xmm8,<xmm0=%xmm0
3828pxor  %xmm8,%xmm0
3829
3830# qhasm:     xmm1 ^= xmm9
3831# asm 1: pxor  <xmm9=int6464#10,<xmm1=int6464#2
3832# asm 2: pxor  <xmm9=%xmm9,<xmm1=%xmm1
3833pxor  %xmm9,%xmm1
3834
3835# qhasm:     xmm4 ^= xmm10
3836# asm 1: pxor  <xmm10=int6464#11,<xmm4=int6464#5
3837# asm 2: pxor  <xmm10=%xmm10,<xmm4=%xmm4
3838pxor  %xmm10,%xmm4
3839
3840# qhasm:     xmm6 ^= xmm11
3841# asm 1: pxor  <xmm11=int6464#12,<xmm6=int6464#7
3842# asm 2: pxor  <xmm11=%xmm11,<xmm6=%xmm6
3843pxor  %xmm11,%xmm6
3844
3845# qhasm:     xmm3 ^= xmm12
3846# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#4
3847# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm3
3848pxor  %xmm12,%xmm3
3849
3850# qhasm:     xmm7 ^= xmm13
3851# asm 1: pxor  <xmm13=int6464#14,<xmm7=int6464#8
3852# asm 2: pxor  <xmm13=%xmm13,<xmm7=%xmm7
3853pxor  %xmm13,%xmm7
3854
3855# qhasm:     xmm2 ^= xmm14
3856# asm 1: pxor  <xmm14=int6464#15,<xmm2=int6464#3
3857# asm 2: pxor  <xmm14=%xmm14,<xmm2=%xmm2
3858pxor  %xmm14,%xmm2
3859
3860# qhasm:     xmm5 ^= xmm15
3861# asm 1: pxor  <xmm15=int6464#16,<xmm5=int6464#6
3862# asm 2: pxor  <xmm15=%xmm15,<xmm5=%xmm5
3863pxor  %xmm15,%xmm5
3864
3865# qhasm:     xmm8 ^= xmm5
3866# asm 1: pxor  <xmm5=int6464#6,<xmm8=int6464#9
3867# asm 2: pxor  <xmm5=%xmm5,<xmm8=%xmm8
3868pxor  %xmm5,%xmm8
3869
3870# qhasm:     xmm9 ^= xmm0
3871# asm 1: pxor  <xmm0=int6464#1,<xmm9=int6464#10
3872# asm 2: pxor  <xmm0=%xmm0,<xmm9=%xmm9
3873pxor  %xmm0,%xmm9
3874
3875# qhasm:     xmm10 ^= xmm1
3876# asm 1: pxor  <xmm1=int6464#2,<xmm10=int6464#11
3877# asm 2: pxor  <xmm1=%xmm1,<xmm10=%xmm10
3878pxor  %xmm1,%xmm10
3879
3880# qhasm:     xmm9 ^= xmm5
3881# asm 1: pxor  <xmm5=int6464#6,<xmm9=int6464#10
3882# asm 2: pxor  <xmm5=%xmm5,<xmm9=%xmm9
3883pxor  %xmm5,%xmm9
3884
3885# qhasm:     xmm11 ^= xmm4
3886# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#12
3887# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm11
3888pxor  %xmm4,%xmm11
3889
3890# qhasm:     xmm12 ^= xmm6
3891# asm 1: pxor  <xmm6=int6464#7,<xmm12=int6464#13
3892# asm 2: pxor  <xmm6=%xmm6,<xmm12=%xmm12
3893pxor  %xmm6,%xmm12
3894
3895# qhasm:     xmm13 ^= xmm3
3896# asm 1: pxor  <xmm3=int6464#4,<xmm13=int6464#14
3897# asm 2: pxor  <xmm3=%xmm3,<xmm13=%xmm13
3898pxor  %xmm3,%xmm13
3899
3900# qhasm:     xmm11 ^= xmm5
3901# asm 1: pxor  <xmm5=int6464#6,<xmm11=int6464#12
3902# asm 2: pxor  <xmm5=%xmm5,<xmm11=%xmm11
3903pxor  %xmm5,%xmm11
3904
3905# qhasm:     xmm14 ^= xmm7
3906# asm 1: pxor  <xmm7=int6464#8,<xmm14=int6464#15
3907# asm 2: pxor  <xmm7=%xmm7,<xmm14=%xmm14
3908pxor  %xmm7,%xmm14
3909
3910# qhasm:     xmm15 ^= xmm2
3911# asm 1: pxor  <xmm2=int6464#3,<xmm15=int6464#16
3912# asm 2: pxor  <xmm2=%xmm2,<xmm15=%xmm15
3913pxor  %xmm2,%xmm15
3914
3915# qhasm:     xmm12 ^= xmm5
3916# asm 1: pxor  <xmm5=int6464#6,<xmm12=int6464#13
3917# asm 2: pxor  <xmm5=%xmm5,<xmm12=%xmm12
3918pxor  %xmm5,%xmm12
3919
3920# qhasm:     xmm0 = shuffle dwords of xmm0 by 0x4E
3921# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
3922# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
3923pshufd $0x4E,%xmm0,%xmm0
3924
3925# qhasm:     xmm1 = shuffle dwords of xmm1 by 0x4E
3926# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
3927# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
3928pshufd $0x4E,%xmm1,%xmm1
3929
3930# qhasm:     xmm4 = shuffle dwords of xmm4 by 0x4E
3931# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
3932# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
3933pshufd $0x4E,%xmm4,%xmm4
3934
3935# qhasm:     xmm6 = shuffle dwords of xmm6 by 0x4E
3936# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
3937# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
3938pshufd $0x4E,%xmm6,%xmm6
3939
3940# qhasm:     xmm3 = shuffle dwords of xmm3 by 0x4E
3941# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
3942# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
3943pshufd $0x4E,%xmm3,%xmm3
3944
3945# qhasm:     xmm7 = shuffle dwords of xmm7 by 0x4E
3946# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
3947# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
3948pshufd $0x4E,%xmm7,%xmm7
3949
3950# qhasm:     xmm2 = shuffle dwords of xmm2 by 0x4E
3951# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
3952# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
3953pshufd $0x4E,%xmm2,%xmm2
3954
3955# qhasm:     xmm5 = shuffle dwords of xmm5 by 0x4E
3956# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
3957# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
3958pshufd $0x4E,%xmm5,%xmm5
3959
3960# qhasm:     xmm8 ^= xmm0
3961# asm 1: pxor  <xmm0=int6464#1,<xmm8=int6464#9
3962# asm 2: pxor  <xmm0=%xmm0,<xmm8=%xmm8
3963pxor  %xmm0,%xmm8
3964
3965# qhasm:     xmm9 ^= xmm1
3966# asm 1: pxor  <xmm1=int6464#2,<xmm9=int6464#10
3967# asm 2: pxor  <xmm1=%xmm1,<xmm9=%xmm9
3968pxor  %xmm1,%xmm9
3969
3970# qhasm:     xmm10 ^= xmm4
3971# asm 1: pxor  <xmm4=int6464#5,<xmm10=int6464#11
3972# asm 2: pxor  <xmm4=%xmm4,<xmm10=%xmm10
3973pxor  %xmm4,%xmm10
3974
3975# qhasm:     xmm11 ^= xmm6
3976# asm 1: pxor  <xmm6=int6464#7,<xmm11=int6464#12
3977# asm 2: pxor  <xmm6=%xmm6,<xmm11=%xmm11
3978pxor  %xmm6,%xmm11
3979
3980# qhasm:     xmm12 ^= xmm3
3981# asm 1: pxor  <xmm3=int6464#4,<xmm12=int6464#13
3982# asm 2: pxor  <xmm3=%xmm3,<xmm12=%xmm12
3983pxor  %xmm3,%xmm12
3984
3985# qhasm:     xmm13 ^= xmm7
3986# asm 1: pxor  <xmm7=int6464#8,<xmm13=int6464#14
3987# asm 2: pxor  <xmm7=%xmm7,<xmm13=%xmm13
3988pxor  %xmm7,%xmm13
3989
3990# qhasm:     xmm14 ^= xmm2
3991# asm 1: pxor  <xmm2=int6464#3,<xmm14=int6464#15
3992# asm 2: pxor  <xmm2=%xmm2,<xmm14=%xmm14
3993pxor  %xmm2,%xmm14
3994
3995# qhasm:     xmm15 ^= xmm5
3996# asm 1: pxor  <xmm5=int6464#6,<xmm15=int6464#16
3997# asm 2: pxor  <xmm5=%xmm5,<xmm15=%xmm15
3998pxor  %xmm5,%xmm15
3999
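# AddRoundKey and ShiftRows. A bitsliced round key occupies eight 16-byte
# slices (128 bytes), so the pxor loads at c+384..c+496 presumably apply
# round key 3 (384 = 3*128). pshufb with the SR constant then performs the
# ShiftRows byte permutation; ShiftRows acts identically on every bit
# plane, so one shuffle per slice suffices.
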
4000# qhasm:     xmm8 ^= *(int128 *)(c + 384)
4001# asm 1: pxor 384(<c=int64#4),<xmm8=int6464#9
4002# asm 2: pxor 384(<c=%rcx),<xmm8=%xmm8
4003pxor 384(%rcx),%xmm8
4004
4005# qhasm:     shuffle bytes of xmm8 by SR
4006# asm 1: pshufb SR,<xmm8=int6464#9
4007# asm 2: pshufb SR,<xmm8=%xmm8
4008pshufb SR,%xmm8
4009
4010# qhasm:     xmm9 ^= *(int128 *)(c + 400)
4011# asm 1: pxor 400(<c=int64#4),<xmm9=int6464#10
4012# asm 2: pxor 400(<c=%rcx),<xmm9=%xmm9
4013pxor 400(%rcx),%xmm9
4014
4015# qhasm:     shuffle bytes of xmm9 by SR
4016# asm 1: pshufb SR,<xmm9=int6464#10
4017# asm 2: pshufb SR,<xmm9=%xmm9
4018pshufb SR,%xmm9
4019
4020# qhasm:     xmm10 ^= *(int128 *)(c + 416)
4021# asm 1: pxor 416(<c=int64#4),<xmm10=int6464#11
4022# asm 2: pxor 416(<c=%rcx),<xmm10=%xmm10
4023pxor 416(%rcx),%xmm10
4024
4025# qhasm:     shuffle bytes of xmm10 by SR
4026# asm 1: pshufb SR,<xmm10=int6464#11
4027# asm 2: pshufb SR,<xmm10=%xmm10
4028pshufb SR,%xmm10
4029
4030# qhasm:     xmm11 ^= *(int128 *)(c + 432)
4031# asm 1: pxor 432(<c=int64#4),<xmm11=int6464#12
4032# asm 2: pxor 432(<c=%rcx),<xmm11=%xmm11
4033pxor 432(%rcx),%xmm11
4034
4035# qhasm:     shuffle bytes of xmm11 by SR
4036# asm 1: pshufb SR,<xmm11=int6464#12
4037# asm 2: pshufb SR,<xmm11=%xmm11
4038pshufb SR,%xmm11
4039
4040# qhasm:     xmm12 ^= *(int128 *)(c + 448)
4041# asm 1: pxor 448(<c=int64#4),<xmm12=int6464#13
4042# asm 2: pxor 448(<c=%rcx),<xmm12=%xmm12
4043pxor 448(%rcx),%xmm12
4044
4045# qhasm:     shuffle bytes of xmm12 by SR
4046# asm 1: pshufb SR,<xmm12=int6464#13
4047# asm 2: pshufb SR,<xmm12=%xmm12
4048pshufb SR,%xmm12
4049
4050# qhasm:     xmm13 ^= *(int128 *)(c + 464)
4051# asm 1: pxor 464(<c=int64#4),<xmm13=int6464#14
4052# asm 2: pxor 464(<c=%rcx),<xmm13=%xmm13
4053pxor 464(%rcx),%xmm13
4054
4055# qhasm:     shuffle bytes of xmm13 by SR
4056# asm 1: pshufb SR,<xmm13=int6464#14
4057# asm 2: pshufb SR,<xmm13=%xmm13
4058pshufb SR,%xmm13
4059
4060# qhasm:     xmm14 ^= *(int128 *)(c + 480)
4061# asm 1: pxor 480(<c=int64#4),<xmm14=int6464#15
4062# asm 2: pxor 480(<c=%rcx),<xmm14=%xmm14
4063pxor 480(%rcx),%xmm14
4064
4065# qhasm:     shuffle bytes of xmm14 by SR
4066# asm 1: pshufb SR,<xmm14=int6464#15
4067# asm 2: pshufb SR,<xmm14=%xmm14
4068pshufb SR,%xmm14
4069
4070# qhasm:     xmm15 ^= *(int128 *)(c + 496)
4071# asm 1: pxor 496(<c=int64#4),<xmm15=int6464#16
4072# asm 2: pxor 496(<c=%rcx),<xmm15=%xmm15
4073pxor 496(%rcx),%xmm15
4074
4075# qhasm:     shuffle bytes of xmm15 by SR
4076# asm 1: pshufb SR,<xmm15=int6464#16
4077# asm 2: pshufb SR,<xmm15=%xmm15
4078pshufb SR,%xmm15
4079
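# SubBytes for the next round. The S-box is evaluated as a Boolean circuit
# on the eight slices at once: an XOR-only top linear layer (below), a
# nonlinear middle that inverts in GF(2^8) via tower-field
# GF(2^4)/GF(2^2) arithmetic (pand/por supply the multiplications), and an
# XOR-only bottom layer. The decomposition appears to follow
# Canright-style compact S-box circuits.
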
4080# qhasm:       xmm13 ^= xmm14
4081# asm 1: pxor  <xmm14=int6464#15,<xmm13=int6464#14
4082# asm 2: pxor  <xmm14=%xmm14,<xmm13=%xmm13
4083pxor  %xmm14,%xmm13
4084
4085# qhasm:       xmm10 ^= xmm9
4086# asm 1: pxor  <xmm9=int6464#10,<xmm10=int6464#11
4087# asm 2: pxor  <xmm9=%xmm9,<xmm10=%xmm10
4088pxor  %xmm9,%xmm10
4089
4090# qhasm:       xmm13 ^= xmm8
4091# asm 1: pxor  <xmm8=int6464#9,<xmm13=int6464#14
4092# asm 2: pxor  <xmm8=%xmm8,<xmm13=%xmm13
4093pxor  %xmm8,%xmm13
4094
4095# qhasm:       xmm14 ^= xmm10
4096# asm 1: pxor  <xmm10=int6464#11,<xmm14=int6464#15
4097# asm 2: pxor  <xmm10=%xmm10,<xmm14=%xmm14
4098pxor  %xmm10,%xmm14
4099
4100# qhasm:       xmm11 ^= xmm8
4101# asm 1: pxor  <xmm8=int6464#9,<xmm11=int6464#12
4102# asm 2: pxor  <xmm8=%xmm8,<xmm11=%xmm11
4103pxor  %xmm8,%xmm11
4104
4105# qhasm:       xmm14 ^= xmm11
4106# asm 1: pxor  <xmm11=int6464#12,<xmm14=int6464#15
4107# asm 2: pxor  <xmm11=%xmm11,<xmm14=%xmm14
4108pxor  %xmm11,%xmm14
4109
4110# qhasm:       xmm11 ^= xmm15
4111# asm 1: pxor  <xmm15=int6464#16,<xmm11=int6464#12
4112# asm 2: pxor  <xmm15=%xmm15,<xmm11=%xmm11
4113pxor  %xmm15,%xmm11
4114
4115# qhasm:       xmm11 ^= xmm12
4116# asm 1: pxor  <xmm12=int6464#13,<xmm11=int6464#12
4117# asm 2: pxor  <xmm12=%xmm12,<xmm11=%xmm11
4118pxor  %xmm12,%xmm11
4119
4120# qhasm:       xmm15 ^= xmm13
4121# asm 1: pxor  <xmm13=int6464#14,<xmm15=int6464#16
4122# asm 2: pxor  <xmm13=%xmm13,<xmm15=%xmm15
4123pxor  %xmm13,%xmm15
4124
4125# qhasm:       xmm11 ^= xmm9
4126# asm 1: pxor  <xmm9=int6464#10,<xmm11=int6464#12
4127# asm 2: pxor  <xmm9=%xmm9,<xmm11=%xmm11
4128pxor  %xmm9,%xmm11
4129
4130# qhasm:       xmm12 ^= xmm13
4131# asm 1: pxor  <xmm13=int6464#14,<xmm12=int6464#13
4132# asm 2: pxor  <xmm13=%xmm13,<xmm12=%xmm12
4133pxor  %xmm13,%xmm12
4134
4135# qhasm:       xmm10 ^= xmm15
4136# asm 1: pxor  <xmm15=int6464#16,<xmm10=int6464#11
4137# asm 2: pxor  <xmm15=%xmm15,<xmm10=%xmm10
4138pxor  %xmm15,%xmm10
4139
4140# qhasm:       xmm9 ^= xmm13
4141# asm 1: pxor  <xmm13=int6464#14,<xmm9=int6464#10
4142# asm 2: pxor  <xmm13=%xmm13,<xmm9=%xmm9
4143pxor  %xmm13,%xmm9
4144
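# Top linear layer done. The movdqa/pxor/pand/por stretch that follows is
# the nonlinear middle: shared products and the subfield inversion.
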
4145# qhasm:       xmm3 = xmm15
4146# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
4147# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
4148movdqa %xmm15,%xmm0
4149
4150# qhasm:       xmm2 = xmm9
4151# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
4152# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
4153movdqa %xmm9,%xmm1
4154
4155# qhasm:       xmm1 = xmm13
4156# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
4157# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
4158movdqa %xmm13,%xmm2
4159
4160# qhasm:       xmm5 = xmm10
4161# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
4162# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
4163movdqa %xmm10,%xmm3
4164
4165# qhasm:       xmm4 = xmm14
4166# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
4167# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
4168movdqa %xmm14,%xmm4
4169
4170# qhasm:       xmm3 ^= xmm12
4171# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#1
4172# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm0
4173pxor  %xmm12,%xmm0
4174
4175# qhasm:       xmm2 ^= xmm10
4176# asm 1: pxor  <xmm10=int6464#11,<xmm2=int6464#2
4177# asm 2: pxor  <xmm10=%xmm10,<xmm2=%xmm1
4178pxor  %xmm10,%xmm1
4179
4180# qhasm:       xmm1 ^= xmm11
4181# asm 1: pxor  <xmm11=int6464#12,<xmm1=int6464#3
4182# asm 2: pxor  <xmm11=%xmm11,<xmm1=%xmm2
4183pxor  %xmm11,%xmm2
4184
4185# qhasm:       xmm5 ^= xmm12
4186# asm 1: pxor  <xmm12=int6464#13,<xmm5=int6464#4
4187# asm 2: pxor  <xmm12=%xmm12,<xmm5=%xmm3
4188pxor  %xmm12,%xmm3
4189
4190# qhasm:       xmm4 ^= xmm8
4191# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#5
4192# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm4
4193pxor  %xmm8,%xmm4
4194
4195# qhasm:       xmm6 = xmm3
4196# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
4197# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
4198movdqa %xmm0,%xmm5
4199
4200# qhasm:       xmm0 = xmm2
4201# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
4202# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
4203movdqa %xmm1,%xmm6
4204
4205# qhasm:       xmm7 = xmm3
4206# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
4207# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
4208movdqa %xmm0,%xmm7
4209
4210# qhasm:       xmm2 |= xmm1
4211# asm 1: por   <xmm1=int6464#3,<xmm2=int6464#2
4212# asm 2: por   <xmm1=%xmm2,<xmm2=%xmm1
4213por   %xmm2,%xmm1
4214
4215# qhasm:       xmm3 |= xmm4
4216# asm 1: por   <xmm4=int6464#5,<xmm3=int6464#1
4217# asm 2: por   <xmm4=%xmm4,<xmm3=%xmm0
4218por   %xmm4,%xmm0
4219
4220# qhasm:       xmm7 ^= xmm0
4221# asm 1: pxor  <xmm0=int6464#7,<xmm7=int6464#8
4222# asm 2: pxor  <xmm0=%xmm6,<xmm7=%xmm7
4223pxor  %xmm6,%xmm7
4224
4225# qhasm:       xmm6 &= xmm4
4226# asm 1: pand  <xmm4=int6464#5,<xmm6=int6464#6
4227# asm 2: pand  <xmm4=%xmm4,<xmm6=%xmm5
4228pand  %xmm4,%xmm5
4229
4230# qhasm:       xmm0 &= xmm1
4231# asm 1: pand  <xmm1=int6464#3,<xmm0=int6464#7
4232# asm 2: pand  <xmm1=%xmm2,<xmm0=%xmm6
4233pand  %xmm2,%xmm6
4234
4235# qhasm:       xmm4 ^= xmm1
4236# asm 1: pxor  <xmm1=int6464#3,<xmm4=int6464#5
4237# asm 2: pxor  <xmm1=%xmm2,<xmm4=%xmm4
4238pxor  %xmm2,%xmm4
4239
4240# qhasm:       xmm7 &= xmm4
4241# asm 1: pand  <xmm4=int6464#5,<xmm7=int6464#8
4242# asm 2: pand  <xmm4=%xmm4,<xmm7=%xmm7
4243pand  %xmm4,%xmm7
4244
4245# qhasm:       xmm4 = xmm11
4246# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
4247# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
4248movdqa %xmm11,%xmm2
4249
4250# qhasm:       xmm4 ^= xmm8
4251# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#3
4252# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm2
4253pxor  %xmm8,%xmm2
4254
4255# qhasm:       xmm5 &= xmm4
4256# asm 1: pand  <xmm4=int6464#3,<xmm5=int6464#4
4257# asm 2: pand  <xmm4=%xmm2,<xmm5=%xmm3
4258pand  %xmm2,%xmm3
4259
4260# qhasm:       xmm3 ^= xmm5
4261# asm 1: pxor  <xmm5=int6464#4,<xmm3=int6464#1
4262# asm 2: pxor  <xmm5=%xmm3,<xmm3=%xmm0
4263pxor  %xmm3,%xmm0
4264
4265# qhasm:       xmm2 ^= xmm5
4266# asm 1: pxor  <xmm5=int6464#4,<xmm2=int6464#2
4267# asm 2: pxor  <xmm5=%xmm3,<xmm2=%xmm1
4268pxor  %xmm3,%xmm1
4269
4270# qhasm:       xmm5 = xmm15
4271# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
4272# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
4273movdqa %xmm15,%xmm2
4274
4275# qhasm:       xmm5 ^= xmm9
4276# asm 1: pxor  <xmm9=int6464#10,<xmm5=int6464#3
4277# asm 2: pxor  <xmm9=%xmm9,<xmm5=%xmm2
4278pxor  %xmm9,%xmm2
4279
4280# qhasm:       xmm4 = xmm13
4281# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
4282# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
4283movdqa %xmm13,%xmm3
4284
4285# qhasm:       xmm1 = xmm5
4286# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
4287# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
4288movdqa %xmm2,%xmm4
4289
4290# qhasm:       xmm4 ^= xmm14
4291# asm 1: pxor  <xmm14=int6464#15,<xmm4=int6464#4
4292# asm 2: pxor  <xmm14=%xmm14,<xmm4=%xmm3
4293pxor  %xmm14,%xmm3
4294
4295# qhasm:       xmm1 |= xmm4
4296# asm 1: por   <xmm4=int6464#4,<xmm1=int6464#5
4297# asm 2: por   <xmm4=%xmm3,<xmm1=%xmm4
4298por   %xmm3,%xmm4
4299
4300# qhasm:       xmm5 &= xmm4
4301# asm 1: pand  <xmm4=int6464#4,<xmm5=int6464#3
4302# asm 2: pand  <xmm4=%xmm3,<xmm5=%xmm2
4303pand  %xmm3,%xmm2
4304
4305# qhasm:       xmm0 ^= xmm5
4306# asm 1: pxor  <xmm5=int6464#3,<xmm0=int6464#7
4307# asm 2: pxor  <xmm5=%xmm2,<xmm0=%xmm6
4308pxor  %xmm2,%xmm6
4309
4310# qhasm:       xmm3 ^= xmm7
4311# asm 1: pxor  <xmm7=int6464#8,<xmm3=int6464#1
4312# asm 2: pxor  <xmm7=%xmm7,<xmm3=%xmm0
4313pxor  %xmm7,%xmm0
4314
4315# qhasm:       xmm2 ^= xmm6
4316# asm 1: pxor  <xmm6=int6464#6,<xmm2=int6464#2
4317# asm 2: pxor  <xmm6=%xmm5,<xmm2=%xmm1
4318pxor  %xmm5,%xmm1
4319
4320# qhasm:       xmm1 ^= xmm7
4321# asm 1: pxor  <xmm7=int6464#8,<xmm1=int6464#5
4322# asm 2: pxor  <xmm7=%xmm7,<xmm1=%xmm4
4323pxor  %xmm7,%xmm4
4324
4325# qhasm:       xmm0 ^= xmm6
4326# asm 1: pxor  <xmm6=int6464#6,<xmm0=int6464#7
4327# asm 2: pxor  <xmm6=%xmm5,<xmm0=%xmm6
4328pxor  %xmm5,%xmm6
4329
4330# qhasm:       xmm1 ^= xmm6
4331# asm 1: pxor  <xmm6=int6464#6,<xmm1=int6464#5
4332# asm 2: pxor  <xmm6=%xmm5,<xmm1=%xmm4
4333pxor  %xmm5,%xmm4
4334
4335# qhasm:       xmm4 = xmm10
4336# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
4337# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
4338movdqa %xmm10,%xmm2
4339
4340# qhasm:       xmm5 = xmm12
4341# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
4342# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
4343movdqa %xmm12,%xmm3
4344
4345# qhasm:       xmm6 = xmm9
4346# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
4347# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
4348movdqa %xmm9,%xmm5
4349
4350# qhasm:       xmm7 = xmm15
4351# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
4352# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
4353movdqa %xmm15,%xmm7
4354
4355# qhasm:       xmm4 &= xmm11
4356# asm 1: pand  <xmm11=int6464#12,<xmm4=int6464#3
4357# asm 2: pand  <xmm11=%xmm11,<xmm4=%xmm2
4358pand  %xmm11,%xmm2
4359
4360# qhasm:       xmm5 &= xmm8
4361# asm 1: pand  <xmm8=int6464#9,<xmm5=int6464#4
4362# asm 2: pand  <xmm8=%xmm8,<xmm5=%xmm3
4363pand  %xmm8,%xmm3
4364
4365# qhasm:       xmm6 &= xmm13
4366# asm 1: pand  <xmm13=int6464#14,<xmm6=int6464#6
4367# asm 2: pand  <xmm13=%xmm13,<xmm6=%xmm5
4368pand  %xmm13,%xmm5
4369
4370# qhasm:       xmm7 |= xmm14
4371# asm 1: por   <xmm14=int6464#15,<xmm7=int6464#8
4372# asm 2: por   <xmm14=%xmm14,<xmm7=%xmm7
4373por   %xmm14,%xmm7
4374
4375# qhasm:       xmm3 ^= xmm4
4376# asm 1: pxor  <xmm4=int6464#3,<xmm3=int6464#1
4377# asm 2: pxor  <xmm4=%xmm2,<xmm3=%xmm0
4378pxor  %xmm2,%xmm0
4379
4380# qhasm:       xmm2 ^= xmm5
4381# asm 1: pxor  <xmm5=int6464#4,<xmm2=int6464#2
4382# asm 2: pxor  <xmm5=%xmm3,<xmm2=%xmm1
4383pxor  %xmm3,%xmm1
4384
4385# qhasm:       xmm1 ^= xmm6
4386# asm 1: pxor  <xmm6=int6464#6,<xmm1=int6464#5
4387# asm 2: pxor  <xmm6=%xmm5,<xmm1=%xmm4
4388pxor  %xmm5,%xmm4
4389
4390# qhasm:       xmm0 ^= xmm7
4391# asm 1: pxor  <xmm7=int6464#8,<xmm0=int6464#7
4392# asm 2: pxor  <xmm7=%xmm7,<xmm0=%xmm6
4393pxor  %xmm7,%xmm6
4394
4395# qhasm:       xmm4 = xmm3
4396# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
4397# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
4398movdqa %xmm0,%xmm2
4399
4400# qhasm:       xmm4 ^= xmm2
4401# asm 1: pxor  <xmm2=int6464#2,<xmm4=int6464#3
4402# asm 2: pxor  <xmm2=%xmm1,<xmm4=%xmm2
4403pxor  %xmm1,%xmm2
4404
4405# qhasm:       xmm3 &= xmm1
4406# asm 1: pand  <xmm1=int6464#5,<xmm3=int6464#1
4407# asm 2: pand  <xmm1=%xmm4,<xmm3=%xmm0
4408pand  %xmm4,%xmm0
4409
4410# qhasm:       xmm6 = xmm0
4411# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
4412# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
4413movdqa %xmm6,%xmm3
4414
4415# qhasm:       xmm6 ^= xmm3
4416# asm 1: pxor  <xmm3=int6464#1,<xmm6=int6464#4
4417# asm 2: pxor  <xmm3=%xmm0,<xmm6=%xmm3
4418pxor  %xmm0,%xmm3
4419
4420# qhasm:       xmm7 = xmm4
4421# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
4422# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
4423movdqa %xmm2,%xmm5
4424
4425# qhasm:       xmm7 &= xmm6
4426# asm 1: pand  <xmm6=int6464#4,<xmm7=int6464#6
4427# asm 2: pand  <xmm6=%xmm3,<xmm7=%xmm5
4428pand  %xmm3,%xmm5
4429
4430# qhasm:       xmm7 ^= xmm2
4431# asm 1: pxor  <xmm2=int6464#2,<xmm7=int6464#6
4432# asm 2: pxor  <xmm2=%xmm1,<xmm7=%xmm5
4433pxor  %xmm1,%xmm5
4434
4435# qhasm:       xmm5 = xmm1
4436# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
4437# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
4438movdqa %xmm4,%xmm7
4439
4440# qhasm:       xmm5 ^= xmm0
4441# asm 1: pxor  <xmm0=int6464#7,<xmm5=int6464#8
4442# asm 2: pxor  <xmm0=%xmm6,<xmm5=%xmm7
4443pxor  %xmm6,%xmm7
4444
4445# qhasm:       xmm3 ^= xmm2
4446# asm 1: pxor  <xmm2=int6464#2,<xmm3=int6464#1
4447# asm 2: pxor  <xmm2=%xmm1,<xmm3=%xmm0
4448pxor  %xmm1,%xmm0
4449
4450# qhasm:       xmm5 &= xmm3
4451# asm 1: pand  <xmm3=int6464#1,<xmm5=int6464#8
4452# asm 2: pand  <xmm3=%xmm0,<xmm5=%xmm7
4453pand  %xmm0,%xmm7
4454
4455# qhasm:       xmm5 ^= xmm0
4456# asm 1: pxor  <xmm0=int6464#7,<xmm5=int6464#8
4457# asm 2: pxor  <xmm0=%xmm6,<xmm5=%xmm7
4458pxor  %xmm6,%xmm7
4459
4460# qhasm:       xmm1 ^= xmm5
4461# asm 1: pxor  <xmm5=int6464#8,<xmm1=int6464#5
4462# asm 2: pxor  <xmm5=%xmm7,<xmm1=%xmm4
4463pxor  %xmm7,%xmm4
4464
4465# qhasm:       xmm2 = xmm6
4466# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
4467# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
4468movdqa %xmm3,%xmm0
4469
4470# qhasm:       xmm2 ^= xmm5
4471# asm 1: pxor  <xmm5=int6464#8,<xmm2=int6464#1
4472# asm 2: pxor  <xmm5=%xmm7,<xmm2=%xmm0
4473pxor  %xmm7,%xmm0
4474
4475# qhasm:       xmm2 &= xmm0
4476# asm 1: pand  <xmm0=int6464#7,<xmm2=int6464#1
4477# asm 2: pand  <xmm0=%xmm6,<xmm2=%xmm0
4478pand  %xmm6,%xmm0
4479
4480# qhasm:       xmm1 ^= xmm2
4481# asm 1: pxor  <xmm2=int6464#1,<xmm1=int6464#5
4482# asm 2: pxor  <xmm2=%xmm0,<xmm1=%xmm4
4483pxor  %xmm0,%xmm4
4484
4485# qhasm:       xmm6 ^= xmm2
4486# asm 1: pxor  <xmm2=int6464#1,<xmm6=int6464#4
4487# asm 2: pxor  <xmm2=%xmm0,<xmm6=%xmm3
4488pxor  %xmm0,%xmm3
4489
4490# qhasm:       xmm6 &= xmm7
4491# asm 1: pand  <xmm7=int6464#6,<xmm6=int6464#4
4492# asm 2: pand  <xmm7=%xmm5,<xmm6=%xmm3
4493pand  %xmm5,%xmm3
4494
4495# qhasm:       xmm6 ^= xmm4
4496# asm 1: pxor  <xmm4=int6464#3,<xmm6=int6464#4
4497# asm 2: pxor  <xmm4=%xmm2,<xmm6=%xmm3
4498pxor  %xmm2,%xmm3
4499
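# With the subfield inverse computed, the blocks below appear to multiply
# it back against the input halves to reach the GF(2^8) inverse: each
# block copies two slices, XORs to form operand sums, ANDs for the partial
# products, and folds the results into xmm14, xmm8, xmm13 and xmm11.
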
4500# qhasm:         xmm4 = xmm14
4501# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
4502# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
4503movdqa %xmm14,%xmm0
4504
4505# qhasm:         xmm0 = xmm13
4506# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
4507# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
4508movdqa %xmm13,%xmm1
4509
4510# qhasm:           xmm2 = xmm7
4511# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
4512# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
4513movdqa %xmm5,%xmm2
4514
4515# qhasm:           xmm2 ^= xmm6
4516# asm 1: pxor  <xmm6=int6464#4,<xmm2=int6464#3
4517# asm 2: pxor  <xmm6=%xmm3,<xmm2=%xmm2
4518pxor  %xmm3,%xmm2
4519
4520# qhasm:           xmm2 &= xmm14
4521# asm 1: pand  <xmm14=int6464#15,<xmm2=int6464#3
4522# asm 2: pand  <xmm14=%xmm14,<xmm2=%xmm2
4523pand  %xmm14,%xmm2
4524
4525# qhasm:           xmm14 ^= xmm13
4526# asm 1: pxor  <xmm13=int6464#14,<xmm14=int6464#15
4527# asm 2: pxor  <xmm13=%xmm13,<xmm14=%xmm14
4528pxor  %xmm13,%xmm14
4529
4530# qhasm:           xmm14 &= xmm6
4531# asm 1: pand  <xmm6=int6464#4,<xmm14=int6464#15
4532# asm 2: pand  <xmm6=%xmm3,<xmm14=%xmm14
4533pand  %xmm3,%xmm14
4534
4535# qhasm:           xmm13 &= xmm7
4536# asm 1: pand  <xmm7=int6464#6,<xmm13=int6464#14
4537# asm 2: pand  <xmm7=%xmm5,<xmm13=%xmm13
4538pand  %xmm5,%xmm13
4539
4540# qhasm:           xmm14 ^= xmm13
4541# asm 1: pxor  <xmm13=int6464#14,<xmm14=int6464#15
4542# asm 2: pxor  <xmm13=%xmm13,<xmm14=%xmm14
4543pxor  %xmm13,%xmm14
4544
4545# qhasm:           xmm13 ^= xmm2
4546# asm 1: pxor  <xmm2=int6464#3,<xmm13=int6464#14
4547# asm 2: pxor  <xmm2=%xmm2,<xmm13=%xmm13
4548pxor  %xmm2,%xmm13
4549
4550# qhasm:         xmm4 ^= xmm8
4551# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#1
4552# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm0
4553pxor  %xmm8,%xmm0
4554
4555# qhasm:         xmm0 ^= xmm11
4556# asm 1: pxor  <xmm11=int6464#12,<xmm0=int6464#2
4557# asm 2: pxor  <xmm11=%xmm11,<xmm0=%xmm1
4558pxor  %xmm11,%xmm1
4559
4560# qhasm:         xmm7 ^= xmm5
4561# asm 1: pxor  <xmm5=int6464#8,<xmm7=int6464#6
4562# asm 2: pxor  <xmm5=%xmm7,<xmm7=%xmm5
4563pxor  %xmm7,%xmm5
4564
4565# qhasm:         xmm6 ^= xmm1
4566# asm 1: pxor  <xmm1=int6464#5,<xmm6=int6464#4
4567# asm 2: pxor  <xmm1=%xmm4,<xmm6=%xmm3
4568pxor  %xmm4,%xmm3
4569
4570# qhasm:           xmm3 = xmm7
4571# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
4572# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
4573movdqa %xmm5,%xmm2
4574
4575# qhasm:           xmm3 ^= xmm6
4576# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
4577# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
4578pxor  %xmm3,%xmm2
4579
4580# qhasm:           xmm3 &= xmm4
4581# asm 1: pand  <xmm4=int6464#1,<xmm3=int6464#3
4582# asm 2: pand  <xmm4=%xmm0,<xmm3=%xmm2
4583pand  %xmm0,%xmm2
4584
4585# qhasm:           xmm4 ^= xmm0
4586# asm 1: pxor  <xmm0=int6464#2,<xmm4=int6464#1
4587# asm 2: pxor  <xmm0=%xmm1,<xmm4=%xmm0
4588pxor  %xmm1,%xmm0
4589
4590# qhasm:           xmm4 &= xmm6
4591# asm 1: pand  <xmm6=int6464#4,<xmm4=int6464#1
4592# asm 2: pand  <xmm6=%xmm3,<xmm4=%xmm0
4593pand  %xmm3,%xmm0
4594
4595# qhasm:           xmm0 &= xmm7
4596# asm 1: pand  <xmm7=int6464#6,<xmm0=int6464#2
4597# asm 2: pand  <xmm7=%xmm5,<xmm0=%xmm1
4598pand  %xmm5,%xmm1
4599
4600# qhasm:           xmm0 ^= xmm4
4601# asm 1: pxor  <xmm4=int6464#1,<xmm0=int6464#2
4602# asm 2: pxor  <xmm4=%xmm0,<xmm0=%xmm1
4603pxor  %xmm0,%xmm1
4604
4605# qhasm:           xmm4 ^= xmm3
4606# asm 1: pxor  <xmm3=int6464#3,<xmm4=int6464#1
4607# asm 2: pxor  <xmm3=%xmm2,<xmm4=%xmm0
4608pxor  %xmm2,%xmm0
4609
4610# qhasm:           xmm2 = xmm5
4611# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
4612# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
4613movdqa %xmm7,%xmm2
4614
4615# qhasm:           xmm2 ^= xmm1
4616# asm 1: pxor  <xmm1=int6464#5,<xmm2=int6464#3
4617# asm 2: pxor  <xmm1=%xmm4,<xmm2=%xmm2
4618pxor  %xmm4,%xmm2
4619
4620# qhasm:           xmm2 &= xmm8
4621# asm 1: pand  <xmm8=int6464#9,<xmm2=int6464#3
4622# asm 2: pand  <xmm8=%xmm8,<xmm2=%xmm2
4623pand  %xmm8,%xmm2
4624
4625# qhasm:           xmm8 ^= xmm11
4626# asm 1: pxor  <xmm11=int6464#12,<xmm8=int6464#9
4627# asm 2: pxor  <xmm11=%xmm11,<xmm8=%xmm8
4628pxor  %xmm11,%xmm8
4629
4630# qhasm:           xmm8 &= xmm1
4631# asm 1: pand  <xmm1=int6464#5,<xmm8=int6464#9
4632# asm 2: pand  <xmm1=%xmm4,<xmm8=%xmm8
4633pand  %xmm4,%xmm8
4634
4635# qhasm:           xmm11 &= xmm5
4636# asm 1: pand  <xmm5=int6464#8,<xmm11=int6464#12
4637# asm 2: pand  <xmm5=%xmm7,<xmm11=%xmm11
4638pand  %xmm7,%xmm11
4639
4640# qhasm:           xmm8 ^= xmm11
4641# asm 1: pxor  <xmm11=int6464#12,<xmm8=int6464#9
4642# asm 2: pxor  <xmm11=%xmm11,<xmm8=%xmm8
4643pxor  %xmm11,%xmm8
4644
4645# qhasm:           xmm11 ^= xmm2
4646# asm 1: pxor  <xmm2=int6464#3,<xmm11=int6464#12
4647# asm 2: pxor  <xmm2=%xmm2,<xmm11=%xmm11
4648pxor  %xmm2,%xmm11
4649
4650# qhasm:         xmm14 ^= xmm4
4651# asm 1: pxor  <xmm4=int6464#1,<xmm14=int6464#15
4652# asm 2: pxor  <xmm4=%xmm0,<xmm14=%xmm14
4653pxor  %xmm0,%xmm14
4654
4655# qhasm:         xmm8 ^= xmm4
4656# asm 1: pxor  <xmm4=int6464#1,<xmm8=int6464#9
4657# asm 2: pxor  <xmm4=%xmm0,<xmm8=%xmm8
4658pxor  %xmm0,%xmm8
4659
4660# qhasm:         xmm13 ^= xmm0
4661# asm 1: pxor  <xmm0=int6464#2,<xmm13=int6464#14
4662# asm 2: pxor  <xmm0=%xmm1,<xmm13=%xmm13
4663pxor  %xmm1,%xmm13
4664
4665# qhasm:         xmm11 ^= xmm0
4666# asm 1: pxor  <xmm0=int6464#2,<xmm11=int6464#12
4667# asm 2: pxor  <xmm0=%xmm1,<xmm11=%xmm11
4668pxor  %xmm1,%xmm11
4669
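# Same multiply-back pattern for the remaining slices; this half folds its
# results into xmm15, xmm12, xmm9 and xmm10.
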
4670# qhasm:         xmm4 = xmm15
4671# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
4672# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
4673movdqa %xmm15,%xmm0
4674
4675# qhasm:         xmm0 = xmm9
4676# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
4677# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
4678movdqa %xmm9,%xmm1
4679
4680# qhasm:         xmm4 ^= xmm12
4681# asm 1: pxor  <xmm12=int6464#13,<xmm4=int6464#1
4682# asm 2: pxor  <xmm12=%xmm12,<xmm4=%xmm0
4683pxor  %xmm12,%xmm0
4684
4685# qhasm:         xmm0 ^= xmm10
4686# asm 1: pxor  <xmm10=int6464#11,<xmm0=int6464#2
4687# asm 2: pxor  <xmm10=%xmm10,<xmm0=%xmm1
4688pxor  %xmm10,%xmm1
4689
4690# qhasm:           xmm3 = xmm7
4691# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
4692# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
4693movdqa %xmm5,%xmm2
4694
4695# qhasm:           xmm3 ^= xmm6
4696# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
4697# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
4698pxor  %xmm3,%xmm2
4699
4700# qhasm:           xmm3 &= xmm4
4701# asm 1: pand  <xmm4=int6464#1,<xmm3=int6464#3
4702# asm 2: pand  <xmm4=%xmm0,<xmm3=%xmm2
4703pand  %xmm0,%xmm2
4704
4705# qhasm:           xmm4 ^= xmm0
4706# asm 1: pxor  <xmm0=int6464#2,<xmm4=int6464#1
4707# asm 2: pxor  <xmm0=%xmm1,<xmm4=%xmm0
4708pxor  %xmm1,%xmm0
4709
4710# qhasm:           xmm4 &= xmm6
4711# asm 1: pand  <xmm6=int6464#4,<xmm4=int6464#1
4712# asm 2: pand  <xmm6=%xmm3,<xmm4=%xmm0
4713pand  %xmm3,%xmm0
4714
4715# qhasm:           xmm0 &= xmm7
4716# asm 1: pand  <xmm7=int6464#6,<xmm0=int6464#2
4717# asm 2: pand  <xmm7=%xmm5,<xmm0=%xmm1
4718pand  %xmm5,%xmm1
4719
4720# qhasm:           xmm0 ^= xmm4
4721# asm 1: pxor  <xmm4=int6464#1,<xmm0=int6464#2
4722# asm 2: pxor  <xmm4=%xmm0,<xmm0=%xmm1
4723pxor  %xmm0,%xmm1
4724
4725# qhasm:           xmm4 ^= xmm3
4726# asm 1: pxor  <xmm3=int6464#3,<xmm4=int6464#1
4727# asm 2: pxor  <xmm3=%xmm2,<xmm4=%xmm0
4728pxor  %xmm2,%xmm0
4729
4730# qhasm:           xmm2 = xmm5
4731# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
4732# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
4733movdqa %xmm7,%xmm2
4734
4735# qhasm:           xmm2 ^= xmm1
4736# asm 1: pxor  <xmm1=int6464#5,<xmm2=int6464#3
4737# asm 2: pxor  <xmm1=%xmm4,<xmm2=%xmm2
4738pxor  %xmm4,%xmm2
4739
4740# qhasm:           xmm2 &= xmm12
4741# asm 1: pand  <xmm12=int6464#13,<xmm2=int6464#3
4742# asm 2: pand  <xmm12=%xmm12,<xmm2=%xmm2
4743pand  %xmm12,%xmm2
4744
4745# qhasm:           xmm12 ^= xmm10
4746# asm 1: pxor  <xmm10=int6464#11,<xmm12=int6464#13
4747# asm 2: pxor  <xmm10=%xmm10,<xmm12=%xmm12
4748pxor  %xmm10,%xmm12
4749
4750# qhasm:           xmm12 &= xmm1
4751# asm 1: pand  <xmm1=int6464#5,<xmm12=int6464#13
4752# asm 2: pand  <xmm1=%xmm4,<xmm12=%xmm12
4753pand  %xmm4,%xmm12
4754
4755# qhasm:           xmm10 &= xmm5
4756# asm 1: pand  <xmm5=int6464#8,<xmm10=int6464#11
4757# asm 2: pand  <xmm5=%xmm7,<xmm10=%xmm10
4758pand  %xmm7,%xmm10
4759
4760# qhasm:           xmm12 ^= xmm10
4761# asm 1: pxor  <xmm10=int6464#11,<xmm12=int6464#13
4762# asm 2: pxor  <xmm10=%xmm10,<xmm12=%xmm12
4763pxor  %xmm10,%xmm12
4764
4765# qhasm:           xmm10 ^= xmm2
4766# asm 1: pxor  <xmm2=int6464#3,<xmm10=int6464#11
4767# asm 2: pxor  <xmm2=%xmm2,<xmm10=%xmm10
4768pxor  %xmm2,%xmm10
4769
4770# qhasm:         xmm7 ^= xmm5
4771# asm 1: pxor  <xmm5=int6464#8,<xmm7=int6464#6
4772# asm 2: pxor  <xmm5=%xmm7,<xmm7=%xmm5
4773pxor  %xmm7,%xmm5
4774
4775# qhasm:         xmm6 ^= xmm1
4776# asm 1: pxor  <xmm1=int6464#5,<xmm6=int6464#4
4777# asm 2: pxor  <xmm1=%xmm4,<xmm6=%xmm3
4778pxor  %xmm4,%xmm3
4779
4780# qhasm:           xmm3 = xmm7
4781# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
4782# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
4783movdqa %xmm5,%xmm2
4784
4785# qhasm:           xmm3 ^= xmm6
4786# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
4787# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
4788pxor  %xmm3,%xmm2
4789
4790# qhasm:           xmm3 &= xmm15
4791# asm 1: pand  <xmm15=int6464#16,<xmm3=int6464#3
4792# asm 2: pand  <xmm15=%xmm15,<xmm3=%xmm2
4793pand  %xmm15,%xmm2
4794
4795# qhasm:           xmm15 ^= xmm9
4796# asm 1: pxor  <xmm9=int6464#10,<xmm15=int6464#16
4797# asm 2: pxor  <xmm9=%xmm9,<xmm15=%xmm15
4798pxor  %xmm9,%xmm15
4799
4800# qhasm:           xmm15 &= xmm6
4801# asm 1: pand  <xmm6=int6464#4,<xmm15=int6464#16
4802# asm 2: pand  <xmm6=%xmm3,<xmm15=%xmm15
4803pand  %xmm3,%xmm15
4804
4805# qhasm:           xmm9 &= xmm7
4806# asm 1: pand  <xmm7=int6464#6,<xmm9=int6464#10
4807# asm 2: pand  <xmm7=%xmm5,<xmm9=%xmm9
4808pand  %xmm5,%xmm9
4809
4810# qhasm:           xmm15 ^= xmm9
4811# asm 1: pxor  <xmm9=int6464#10,<xmm15=int6464#16
4812# asm 2: pxor  <xmm9=%xmm9,<xmm15=%xmm15
4813pxor  %xmm9,%xmm15
4814
4815# qhasm:           xmm9 ^= xmm3
4816# asm 1: pxor  <xmm3=int6464#3,<xmm9=int6464#10
4817# asm 2: pxor  <xmm3=%xmm2,<xmm9=%xmm9
4818pxor  %xmm2,%xmm9
4819
4820# qhasm:         xmm15 ^= xmm4
4821# asm 1: pxor  <xmm4=int6464#1,<xmm15=int6464#16
4822# asm 2: pxor  <xmm4=%xmm0,<xmm15=%xmm15
4823pxor  %xmm0,%xmm15
4824
4825# qhasm:         xmm12 ^= xmm4
4826# asm 1: pxor  <xmm4=int6464#1,<xmm12=int6464#13
4827# asm 2: pxor  <xmm4=%xmm0,<xmm12=%xmm12
4828pxor  %xmm0,%xmm12
4829
4830# qhasm:         xmm9 ^= xmm0
4831# asm 1: pxor  <xmm0=int6464#2,<xmm9=int6464#10
4832# asm 2: pxor  <xmm0=%xmm1,<xmm9=%xmm9
4833pxor  %xmm1,%xmm9
4834
4835# qhasm:         xmm10 ^= xmm0
4836# asm 1: pxor  <xmm0=int6464#2,<xmm10=int6464#11
4837# asm 2: pxor  <xmm0=%xmm1,<xmm10=%xmm10
4838pxor  %xmm1,%xmm10
4839
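# XOR-only again: the bottom linear layer of this S-box, leaving the eight
# output bit slices in xmm8..xmm15.
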
4840# qhasm:       xmm15 ^= xmm8
4841# asm 1: pxor  <xmm8=int6464#9,<xmm15=int6464#16
4842# asm 2: pxor  <xmm8=%xmm8,<xmm15=%xmm15
4843pxor  %xmm8,%xmm15
4844
4845# qhasm:       xmm9 ^= xmm14
4846# asm 1: pxor  <xmm14=int6464#15,<xmm9=int6464#10
4847# asm 2: pxor  <xmm14=%xmm14,<xmm9=%xmm9
4848pxor  %xmm14,%xmm9
4849
4850# qhasm:       xmm12 ^= xmm15
4851# asm 1: pxor  <xmm15=int6464#16,<xmm12=int6464#13
4852# asm 2: pxor  <xmm15=%xmm15,<xmm12=%xmm12
4853pxor  %xmm15,%xmm12
4854
4855# qhasm:       xmm14 ^= xmm8
4856# asm 1: pxor  <xmm8=int6464#9,<xmm14=int6464#15
4857# asm 2: pxor  <xmm8=%xmm8,<xmm14=%xmm14
4858pxor  %xmm8,%xmm14
4859
4860# qhasm:       xmm8 ^= xmm9
4861# asm 1: pxor  <xmm9=int6464#10,<xmm8=int6464#9
4862# asm 2: pxor  <xmm9=%xmm9,<xmm8=%xmm8
4863pxor  %xmm9,%xmm8
4864
4865# qhasm:       xmm9 ^= xmm13
4866# asm 1: pxor  <xmm13=int6464#14,<xmm9=int6464#10
4867# asm 2: pxor  <xmm13=%xmm13,<xmm9=%xmm9
4868pxor  %xmm13,%xmm9
4869
4870# qhasm:       xmm13 ^= xmm10
4871# asm 1: pxor  <xmm10=int6464#11,<xmm13=int6464#14
4872# asm 2: pxor  <xmm10=%xmm10,<xmm13=%xmm13
4873pxor  %xmm10,%xmm13
4874
4875# qhasm:       xmm12 ^= xmm13
4876# asm 1: pxor  <xmm13=int6464#14,<xmm12=int6464#13
4877# asm 2: pxor  <xmm13=%xmm13,<xmm12=%xmm12
4878pxor  %xmm13,%xmm12
4879
4880# qhasm:       xmm10 ^= xmm11
4881# asm 1: pxor  <xmm11=int6464#12,<xmm10=int6464#11
4882# asm 2: pxor  <xmm11=%xmm11,<xmm10=%xmm10
4883pxor  %xmm11,%xmm10
4884
4885# qhasm:       xmm11 ^= xmm13
4886# asm 1: pxor  <xmm13=int6464#14,<xmm11=int6464#12
4887# asm 2: pxor  <xmm13=%xmm13,<xmm11=%xmm11
4888pxor  %xmm13,%xmm11
4889
4890# qhasm:       xmm14 ^= xmm11
4891# asm 1: pxor  <xmm11=int6464#12,<xmm14=int6464#15
4892# asm 2: pxor  <xmm11=%xmm11,<xmm14=%xmm14
4893pxor  %xmm11,%xmm14
4894
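# Second MixColumns of this stretch: the same 0x93/0x4E rotate-and-XOR
# pattern as above, reading the S-box outputs from xmm8..xmm15 and leaving
# the mixed slices in xmm0..xmm7.
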
4895# qhasm:     xmm0 = shuffle dwords of xmm8 by 0x93
4896# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
4897# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
4898pshufd $0x93,%xmm8,%xmm0
4899
4900# qhasm:     xmm1 = shuffle dwords of xmm9 by 0x93
4901# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
4902# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
4903pshufd $0x93,%xmm9,%xmm1
4904
4905# qhasm:     xmm2 = shuffle dwords of xmm12 by 0x93
4906# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
4907# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
4908pshufd $0x93,%xmm12,%xmm2
4909
4910# qhasm:     xmm3 = shuffle dwords of xmm14 by 0x93
4911# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
4912# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
4913pshufd $0x93,%xmm14,%xmm3
4914
4915# qhasm:     xmm4 = shuffle dwords of xmm11 by 0x93
4916# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
4917# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
4918pshufd $0x93,%xmm11,%xmm4
4919
4920# qhasm:     xmm5 = shuffle dwords of xmm15 by 0x93
4921# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
4922# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
4923pshufd $0x93,%xmm15,%xmm5
4924
4925# qhasm:     xmm6 = shuffle dwords of xmm10 by 0x93
4926# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
4927# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
4928pshufd $0x93,%xmm10,%xmm6
4929
4930# qhasm:     xmm7 = shuffle dwords of xmm13 by 0x93
4931# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
4932# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
4933pshufd $0x93,%xmm13,%xmm7
4934
4935# qhasm:     xmm8 ^= xmm0
4936# asm 1: pxor  <xmm0=int6464#1,<xmm8=int6464#9
4937# asm 2: pxor  <xmm0=%xmm0,<xmm8=%xmm8
4938pxor  %xmm0,%xmm8
4939
4940# qhasm:     xmm9 ^= xmm1
4941# asm 1: pxor  <xmm1=int6464#2,<xmm9=int6464#10
4942# asm 2: pxor  <xmm1=%xmm1,<xmm9=%xmm9
4943pxor  %xmm1,%xmm9
4944
4945# qhasm:     xmm12 ^= xmm2
4946# asm 1: pxor  <xmm2=int6464#3,<xmm12=int6464#13
4947# asm 2: pxor  <xmm2=%xmm2,<xmm12=%xmm12
4948pxor  %xmm2,%xmm12
4949
4950# qhasm:     xmm14 ^= xmm3
4951# asm 1: pxor  <xmm3=int6464#4,<xmm14=int6464#15
4952# asm 2: pxor  <xmm3=%xmm3,<xmm14=%xmm14
4953pxor  %xmm3,%xmm14
4954
4955# qhasm:     xmm11 ^= xmm4
4956# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#12
4957# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm11
4958pxor  %xmm4,%xmm11
4959
4960# qhasm:     xmm15 ^= xmm5
4961# asm 1: pxor  <xmm5=int6464#6,<xmm15=int6464#16
4962# asm 2: pxor  <xmm5=%xmm5,<xmm15=%xmm15
4963pxor  %xmm5,%xmm15
4964
4965# qhasm:     xmm10 ^= xmm6
4966# asm 1: pxor  <xmm6=int6464#7,<xmm10=int6464#11
4967# asm 2: pxor  <xmm6=%xmm6,<xmm10=%xmm10
4968pxor  %xmm6,%xmm10
4969
4970# qhasm:     xmm13 ^= xmm7
4971# asm 1: pxor  <xmm7=int6464#8,<xmm13=int6464#14
4972# asm 2: pxor  <xmm7=%xmm7,<xmm13=%xmm13
4973pxor  %xmm7,%xmm13
4974
4975# qhasm:     xmm0 ^= xmm13
4976# asm 1: pxor  <xmm13=int6464#14,<xmm0=int6464#1
4977# asm 2: pxor  <xmm13=%xmm13,<xmm0=%xmm0
4978pxor  %xmm13,%xmm0
4979
4980# qhasm:     xmm1 ^= xmm8
4981# asm 1: pxor  <xmm8=int6464#9,<xmm1=int6464#2
4982# asm 2: pxor  <xmm8=%xmm8,<xmm1=%xmm1
4983pxor  %xmm8,%xmm1
4984
4985# qhasm:     xmm2 ^= xmm9
4986# asm 1: pxor  <xmm9=int6464#10,<xmm2=int6464#3
4987# asm 2: pxor  <xmm9=%xmm9,<xmm2=%xmm2
4988pxor  %xmm9,%xmm2
4989
4990# qhasm:     xmm1 ^= xmm13
4991# asm 1: pxor  <xmm13=int6464#14,<xmm1=int6464#2
4992# asm 2: pxor  <xmm13=%xmm13,<xmm1=%xmm1
4993pxor  %xmm13,%xmm1
4994
4995# qhasm:     xmm3 ^= xmm12
4996# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#4
4997# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm3
4998pxor  %xmm12,%xmm3
4999
5000# qhasm:     xmm4 ^= xmm14
5001# asm 1: pxor  <xmm14=int6464#15,<xmm4=int6464#5
5002# asm 2: pxor  <xmm14=%xmm14,<xmm4=%xmm4
5003pxor  %xmm14,%xmm4
5004
5005# qhasm:     xmm5 ^= xmm11
5006# asm 1: pxor  <xmm11=int6464#12,<xmm5=int6464#6
5007# asm 2: pxor  <xmm11=%xmm11,<xmm5=%xmm5
5008pxor  %xmm11,%xmm5
5009
5010# qhasm:     xmm3 ^= xmm13
5011# asm 1: pxor  <xmm13=int6464#14,<xmm3=int6464#4
5012# asm 2: pxor  <xmm13=%xmm13,<xmm3=%xmm3
5013pxor  %xmm13,%xmm3
5014
5015# qhasm:     xmm6 ^= xmm15
5016# asm 1: pxor  <xmm15=int6464#16,<xmm6=int6464#7
5017# asm 2: pxor  <xmm15=%xmm15,<xmm6=%xmm6
5018pxor  %xmm15,%xmm6
5019
5020# qhasm:     xmm7 ^= xmm10
5021# asm 1: pxor  <xmm10=int6464#11,<xmm7=int6464#8
5022# asm 2: pxor  <xmm10=%xmm10,<xmm7=%xmm7
5023pxor  %xmm10,%xmm7
5024
5025# qhasm:     xmm4 ^= xmm13
5026# asm 1: pxor  <xmm13=int6464#14,<xmm4=int6464#5
5027# asm 2: pxor  <xmm13=%xmm13,<xmm4=%xmm4
5028pxor  %xmm13,%xmm4
5029
5030# qhasm:     xmm8 = shuffle dwords of xmm8 by 0x4E
5031# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
5032# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
5033pshufd $0x4E,%xmm8,%xmm8
5034
5035# qhasm:     xmm9 = shuffle dwords of xmm9 by 0x4E
5036# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
5037# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
5038pshufd $0x4E,%xmm9,%xmm9
5039
5040# qhasm:     xmm12 = shuffle dwords of xmm12 by 0x4E
5041# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
5042# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
5043pshufd $0x4E,%xmm12,%xmm12
5044
5045# qhasm:     xmm14 = shuffle dwords of xmm14 by 0x4E
5046# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
5047# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
5048pshufd $0x4E,%xmm14,%xmm14
5049
5050# qhasm:     xmm11 = shuffle dwords of xmm11 by 0x4E
5051# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
5052# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
5053pshufd $0x4E,%xmm11,%xmm11
5054
5055# qhasm:     xmm15 = shuffle dwords of xmm15 by 0x4E
5056# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
5057# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
5058pshufd $0x4E,%xmm15,%xmm15
5059
5060# qhasm:     xmm10 = shuffle dwords of xmm10 by 0x4E
5061# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
5062# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
5063pshufd $0x4E,%xmm10,%xmm10
5064
5065# qhasm:     xmm13 = shuffle dwords of xmm13 by 0x4E
5066# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
5067# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
5068pshufd $0x4E,%xmm13,%xmm13
5069
5070# qhasm:     xmm0 ^= xmm8
5071# asm 1: pxor  <xmm8=int6464#9,<xmm0=int6464#1
5072# asm 2: pxor  <xmm8=%xmm8,<xmm0=%xmm0
5073pxor  %xmm8,%xmm0
5074
5075# qhasm:     xmm1 ^= xmm9
5076# asm 1: pxor  <xmm9=int6464#10,<xmm1=int6464#2
5077# asm 2: pxor  <xmm9=%xmm9,<xmm1=%xmm1
5078pxor  %xmm9,%xmm1
5079
5080# qhasm:     xmm2 ^= xmm12
5081# asm 1: pxor  <xmm12=int6464#13,<xmm2=int6464#3
5082# asm 2: pxor  <xmm12=%xmm12,<xmm2=%xmm2
5083pxor  %xmm12,%xmm2
5084
5085# qhasm:     xmm3 ^= xmm14
5086# asm 1: pxor  <xmm14=int6464#15,<xmm3=int6464#4
5087# asm 2: pxor  <xmm14=%xmm14,<xmm3=%xmm3
5088pxor  %xmm14,%xmm3
5089
5090# qhasm:     xmm4 ^= xmm11
5091# asm 1: pxor  <xmm11=int6464#12,<xmm4=int6464#5
5092# asm 2: pxor  <xmm11=%xmm11,<xmm4=%xmm4
5093pxor  %xmm11,%xmm4
5094
5095# qhasm:     xmm5 ^= xmm15
5096# asm 1: pxor  <xmm15=int6464#16,<xmm5=int6464#6
5097# asm 2: pxor  <xmm15=%xmm15,<xmm5=%xmm5
5098pxor  %xmm15,%xmm5
5099
5100# qhasm:     xmm6 ^= xmm10
5101# asm 1: pxor  <xmm10=int6464#11,<xmm6=int6464#7
5102# asm 2: pxor  <xmm10=%xmm10,<xmm6=%xmm6
5103pxor  %xmm10,%xmm6
5104
5105# qhasm:     xmm7 ^= xmm13
5106# asm 1: pxor  <xmm13=int6464#14,<xmm7=int6464#8
5107# asm 2: pxor  <xmm13=%xmm13,<xmm7=%xmm7
5108pxor  %xmm13,%xmm7
5109
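# AddRoundKey and ShiftRows for the following round: the round key at
# c+512..c+624 (512 = 4*128, presumably round key 4) is XORed in, then
# pshufb SR permutes the bytes of every slice.
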
5110# qhasm:     xmm0 ^= *(int128 *)(c + 512)
5111# asm 1: pxor 512(<c=int64#4),<xmm0=int6464#1
5112# asm 2: pxor 512(<c=%rcx),<xmm0=%xmm0
5113pxor 512(%rcx),%xmm0
5114
5115# qhasm:     shuffle bytes of xmm0 by SR
5116# asm 1: pshufb SR,<xmm0=int6464#1
5117# asm 2: pshufb SR,<xmm0=%xmm0
5118pshufb SR,%xmm0
5119
5120# qhasm:     xmm1 ^= *(int128 *)(c + 528)
5121# asm 1: pxor 528(<c=int64#4),<xmm1=int6464#2
5122# asm 2: pxor 528(<c=%rcx),<xmm1=%xmm1
5123pxor 528(%rcx),%xmm1
5124
5125# qhasm:     shuffle bytes of xmm1 by SR
5126# asm 1: pshufb SR,<xmm1=int6464#2
5127# asm 2: pshufb SR,<xmm1=%xmm1
5128pshufb SR,%xmm1
5129
5130# qhasm:     xmm2 ^= *(int128 *)(c + 544)
5131# asm 1: pxor 544(<c=int64#4),<xmm2=int6464#3
5132# asm 2: pxor 544(<c=%rcx),<xmm2=%xmm2
5133pxor 544(%rcx),%xmm2
5134
5135# qhasm:     shuffle bytes of xmm2 by SR
5136# asm 1: pshufb SR,<xmm2=int6464#3
5137# asm 2: pshufb SR,<xmm2=%xmm2
5138pshufb SR,%xmm2
5139
5140# qhasm:     xmm3 ^= *(int128 *)(c + 560)
5141# asm 1: pxor 560(<c=int64#4),<xmm3=int6464#4
5142# asm 2: pxor 560(<c=%rcx),<xmm3=%xmm3
5143pxor 560(%rcx),%xmm3
5144
5145# qhasm:     shuffle bytes of xmm3 by SR
5146# asm 1: pshufb SR,<xmm3=int6464#4
5147# asm 2: pshufb SR,<xmm3=%xmm3
5148pshufb SR,%xmm3
5149
5150# qhasm:     xmm4 ^= *(int128 *)(c + 576)
5151# asm 1: pxor 576(<c=int64#4),<xmm4=int6464#5
5152# asm 2: pxor 576(<c=%rcx),<xmm4=%xmm4
5153pxor 576(%rcx),%xmm4
5154
5155# qhasm:     shuffle bytes of xmm4 by SR
5156# asm 1: pshufb SR,<xmm4=int6464#5
5157# asm 2: pshufb SR,<xmm4=%xmm4
5158pshufb SR,%xmm4
5159
5160# qhasm:     xmm5 ^= *(int128 *)(c + 592)
5161# asm 1: pxor 592(<c=int64#4),<xmm5=int6464#6
5162# asm 2: pxor 592(<c=%rcx),<xmm5=%xmm5
5163pxor 592(%rcx),%xmm5
5164
5165# qhasm:     shuffle bytes of xmm5 by SR
5166# asm 1: pshufb SR,<xmm5=int6464#6
5167# asm 2: pshufb SR,<xmm5=%xmm5
5168pshufb SR,%xmm5
5169
5170# qhasm:     xmm6 ^= *(int128 *)(c + 608)
5171# asm 1: pxor 608(<c=int64#4),<xmm6=int6464#7
5172# asm 2: pxor 608(<c=%rcx),<xmm6=%xmm6
5173pxor 608(%rcx),%xmm6
5174
5175# qhasm:     shuffle bytes of xmm6 by SR
5176# asm 1: pshufb SR,<xmm6=int6464#7
5177# asm 2: pshufb SR,<xmm6=%xmm6
5178pshufb SR,%xmm6
5179
5180# qhasm:     xmm7 ^= *(int128 *)(c + 624)
5181# asm 1: pxor 624(<c=int64#4),<xmm7=int6464#8
5182# asm 2: pxor 624(<c=%rcx),<xmm7=%xmm7
5183pxor 624(%rcx),%xmm7
5184
5185# qhasm:     shuffle bytes of xmm7 by SR
5186# asm 1: pshufb SR,<xmm7=int6464#8
5187# asm 2: pshufb SR,<xmm7=%xmm7
5188pshufb SR,%xmm7
5189
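# The next SubBytes begins with the same XOR-only top linear layer, this
# time operating on xmm0..xmm7.
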
5190# qhasm:       xmm5 ^= xmm6
5191# asm 1: pxor  <xmm6=int6464#7,<xmm5=int6464#6
5192# asm 2: pxor  <xmm6=%xmm6,<xmm5=%xmm5
5193pxor  %xmm6,%xmm5
5194
5195# qhasm:       xmm2 ^= xmm1
5196# asm 1: pxor  <xmm1=int6464#2,<xmm2=int6464#3
5197# asm 2: pxor  <xmm1=%xmm1,<xmm2=%xmm2
5198pxor  %xmm1,%xmm2
5199
5200# qhasm:       xmm5 ^= xmm0
5201# asm 1: pxor  <xmm0=int6464#1,<xmm5=int6464#6
5202# asm 2: pxor  <xmm0=%xmm0,<xmm5=%xmm5
5203pxor  %xmm0,%xmm5
5204
5205# qhasm:       xmm6 ^= xmm2
5206# asm 1: pxor  <xmm2=int6464#3,<xmm6=int6464#7
5207# asm 2: pxor  <xmm2=%xmm2,<xmm6=%xmm6
5208pxor  %xmm2,%xmm6
5209
5210# qhasm:       xmm3 ^= xmm0
5211# asm 1: pxor  <xmm0=int6464#1,<xmm3=int6464#4
5212# asm 2: pxor  <xmm0=%xmm0,<xmm3=%xmm3
5213pxor  %xmm0,%xmm3
5214
5215# qhasm:       xmm6 ^= xmm3
5216# asm 1: pxor  <xmm3=int6464#4,<xmm6=int6464#7
5217# asm 2: pxor  <xmm3=%xmm3,<xmm6=%xmm6
5218pxor  %xmm3,%xmm6
5219
5220# qhasm:       xmm3 ^= xmm7
5221# asm 1: pxor  <xmm7=int6464#8,<xmm3=int6464#4
5222# asm 2: pxor  <xmm7=%xmm7,<xmm3=%xmm3
5223pxor  %xmm7,%xmm3
5224
5225# qhasm:       xmm3 ^= xmm4
5226# asm 1: pxor  <xmm4=int6464#5,<xmm3=int6464#4
5227# asm 2: pxor  <xmm4=%xmm4,<xmm3=%xmm3
5228pxor  %xmm4,%xmm3
5229
5230# qhasm:       xmm7 ^= xmm5
5231# asm 1: pxor  <xmm5=int6464#6,<xmm7=int6464#8
5232# asm 2: pxor  <xmm5=%xmm5,<xmm7=%xmm7
5233pxor  %xmm5,%xmm7
5234
5235# qhasm:       xmm3 ^= xmm1
5236# asm 1: pxor  <xmm1=int6464#2,<xmm3=int6464#4
5237# asm 2: pxor  <xmm1=%xmm1,<xmm3=%xmm3
5238pxor  %xmm1,%xmm3
5239
5240# qhasm:       xmm4 ^= xmm5
5241# asm 1: pxor  <xmm5=int6464#6,<xmm4=int6464#5
5242# asm 2: pxor  <xmm5=%xmm5,<xmm4=%xmm4
5243pxor  %xmm5,%xmm4
5244
5245# qhasm:       xmm2 ^= xmm7
5246# asm 1: pxor  <xmm7=int6464#8,<xmm2=int6464#3
5247# asm 2: pxor  <xmm7=%xmm7,<xmm2=%xmm2
5248pxor  %xmm7,%xmm2
5249
5250# qhasm:       xmm1 ^= xmm5
5251# asm 1: pxor  <xmm5=int6464#6,<xmm1=int6464#2
5252# asm 2: pxor  <xmm5=%xmm5,<xmm1=%xmm1
5253pxor  %xmm5,%xmm1
5254
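# Nonlinear middle once more: five slices are copied into xmm8..xmm12,
# then the shared por/pand products and subfield inversion follow.
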
5255# qhasm:       xmm11 = xmm7
5256# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
5257# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
5258movdqa %xmm7,%xmm8
5259
5260# qhasm:       xmm10 = xmm1
5261# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
5262# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
5263movdqa %xmm1,%xmm9
5264
5265# qhasm:       xmm9 = xmm5
5266# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
5267# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
5268movdqa %xmm5,%xmm10
5269
5270# qhasm:       xmm13 = xmm2
5271# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
5272# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
5273movdqa %xmm2,%xmm11
5274
5275# qhasm:       xmm12 = xmm6
5276# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
5277# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
5278movdqa %xmm6,%xmm12
5279
# qhasm:       xmm11 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#9
# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm8
pxor  %xmm4,%xmm8

# qhasm:       xmm10 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm10=int6464#10
# asm 2: pxor  <xmm2=%xmm2,<xmm10=%xmm9
pxor  %xmm2,%xmm9

# qhasm:       xmm9 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm9=int6464#11
# asm 2: pxor  <xmm3=%xmm3,<xmm9=%xmm10
pxor  %xmm3,%xmm10

# qhasm:       xmm13 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm13=int6464#12
# asm 2: pxor  <xmm4=%xmm4,<xmm13=%xmm11
pxor  %xmm4,%xmm11

# qhasm:       xmm12 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#13
# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm12
pxor  %xmm0,%xmm12

# qhasm:       xmm14 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
movdqa %xmm8,%xmm13

# qhasm:       xmm8 = xmm10
# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
movdqa %xmm9,%xmm14

# qhasm:       xmm15 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
movdqa %xmm8,%xmm15

# qhasm:       xmm10 |= xmm9
# asm 1: por   <xmm9=int6464#11,<xmm10=int6464#10
# asm 2: por   <xmm9=%xmm10,<xmm10=%xmm9
por   %xmm10,%xmm9

# qhasm:       xmm11 |= xmm12
# asm 1: por   <xmm12=int6464#13,<xmm11=int6464#9
# asm 2: por   <xmm12=%xmm12,<xmm11=%xmm8
por   %xmm12,%xmm8

# qhasm:       xmm15 ^= xmm8
# asm 1: pxor  <xmm8=int6464#15,<xmm15=int6464#16
# asm 2: pxor  <xmm8=%xmm14,<xmm15=%xmm15
pxor  %xmm14,%xmm15

# qhasm:       xmm14 &= xmm12
# asm 1: pand  <xmm12=int6464#13,<xmm14=int6464#14
# asm 2: pand  <xmm12=%xmm12,<xmm14=%xmm13
pand  %xmm12,%xmm13

# qhasm:       xmm8 &= xmm9
# asm 1: pand  <xmm9=int6464#11,<xmm8=int6464#15
# asm 2: pand  <xmm9=%xmm10,<xmm8=%xmm14
pand  %xmm10,%xmm14

# qhasm:       xmm12 ^= xmm9
# asm 1: pxor  <xmm9=int6464#11,<xmm12=int6464#13
# asm 2: pxor  <xmm9=%xmm10,<xmm12=%xmm12
pxor  %xmm10,%xmm12

# qhasm:       xmm15 &= xmm12
# asm 1: pand  <xmm12=int6464#13,<xmm15=int6464#16
# asm 2: pand  <xmm12=%xmm12,<xmm15=%xmm15
pand  %xmm12,%xmm15

# qhasm:       xmm12 = xmm3
# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
movdqa %xmm3,%xmm10

# qhasm:       xmm12 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#11
# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm10
pxor  %xmm0,%xmm10

# qhasm:       xmm13 &= xmm12
# asm 1: pand  <xmm12=int6464#11,<xmm13=int6464#12
# asm 2: pand  <xmm12=%xmm10,<xmm13=%xmm11
pand  %xmm10,%xmm11

# qhasm:       xmm11 ^= xmm13
# asm 1: pxor  <xmm13=int6464#12,<xmm11=int6464#9
# asm 2: pxor  <xmm13=%xmm11,<xmm11=%xmm8
pxor  %xmm11,%xmm8

# qhasm:       xmm10 ^= xmm13
# asm 1: pxor  <xmm13=int6464#12,<xmm10=int6464#10
# asm 2: pxor  <xmm13=%xmm11,<xmm10=%xmm9
pxor  %xmm11,%xmm9

# qhasm:       xmm13 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
movdqa %xmm7,%xmm10

# qhasm:       xmm13 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm13=int6464#11
# asm 2: pxor  <xmm1=%xmm1,<xmm13=%xmm10
pxor  %xmm1,%xmm10

# qhasm:       xmm12 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
movdqa %xmm5,%xmm11

# qhasm:       xmm9 = xmm13
# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
movdqa %xmm10,%xmm12

# qhasm:       xmm12 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm12=int6464#12
# asm 2: pxor  <xmm6=%xmm6,<xmm12=%xmm11
pxor  %xmm6,%xmm11

# qhasm:       xmm9 |= xmm12
# asm 1: por   <xmm12=int6464#12,<xmm9=int6464#13
# asm 2: por   <xmm12=%xmm11,<xmm9=%xmm12
por   %xmm11,%xmm12

# qhasm:       xmm13 &= xmm12
# asm 1: pand  <xmm12=int6464#12,<xmm13=int6464#11
# asm 2: pand  <xmm12=%xmm11,<xmm13=%xmm10
pand  %xmm11,%xmm10

# qhasm:       xmm8 ^= xmm13
# asm 1: pxor  <xmm13=int6464#11,<xmm8=int6464#15
# asm 2: pxor  <xmm13=%xmm10,<xmm8=%xmm14
pxor  %xmm10,%xmm14

# qhasm:       xmm11 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm11=int6464#9
# asm 2: pxor  <xmm15=%xmm15,<xmm11=%xmm8
pxor  %xmm15,%xmm8

# qhasm:       xmm10 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm10=int6464#10
# asm 2: pxor  <xmm14=%xmm13,<xmm10=%xmm9
pxor  %xmm13,%xmm9

# qhasm:       xmm9 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm9=int6464#13
# asm 2: pxor  <xmm15=%xmm15,<xmm9=%xmm12
pxor  %xmm15,%xmm12

# qhasm:       xmm8 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm8=int6464#15
# asm 2: pxor  <xmm14=%xmm13,<xmm8=%xmm14
pxor  %xmm13,%xmm14

# qhasm:       xmm9 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm9=int6464#13
# asm 2: pxor  <xmm14=%xmm13,<xmm9=%xmm12
pxor  %xmm13,%xmm12

# qhasm:       xmm12 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
movdqa %xmm2,%xmm10

# qhasm:       xmm13 = xmm4
# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
movdqa %xmm4,%xmm11

# qhasm:       xmm14 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
movdqa %xmm1,%xmm13

# qhasm:       xmm15 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
movdqa %xmm7,%xmm15

# qhasm:       xmm12 &= xmm3
# asm 1: pand  <xmm3=int6464#4,<xmm12=int6464#11
# asm 2: pand  <xmm3=%xmm3,<xmm12=%xmm10
pand  %xmm3,%xmm10

# qhasm:       xmm13 &= xmm0
# asm 1: pand  <xmm0=int6464#1,<xmm13=int6464#12
# asm 2: pand  <xmm0=%xmm0,<xmm13=%xmm11
pand  %xmm0,%xmm11

# qhasm:       xmm14 &= xmm5
# asm 1: pand  <xmm5=int6464#6,<xmm14=int6464#14
# asm 2: pand  <xmm5=%xmm5,<xmm14=%xmm13
pand  %xmm5,%xmm13

# qhasm:       xmm15 |= xmm6
# asm 1: por   <xmm6=int6464#7,<xmm15=int6464#16
# asm 2: por   <xmm6=%xmm6,<xmm15=%xmm15
por   %xmm6,%xmm15

# qhasm:       xmm11 ^= xmm12
# asm 1: pxor  <xmm12=int6464#11,<xmm11=int6464#9
# asm 2: pxor  <xmm12=%xmm10,<xmm11=%xmm8
pxor  %xmm10,%xmm8

# qhasm:       xmm10 ^= xmm13
# asm 1: pxor  <xmm13=int6464#12,<xmm10=int6464#10
# asm 2: pxor  <xmm13=%xmm11,<xmm10=%xmm9
pxor  %xmm11,%xmm9

# qhasm:       xmm9 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm9=int6464#13
# asm 2: pxor  <xmm14=%xmm13,<xmm9=%xmm12
pxor  %xmm13,%xmm12

# qhasm:       xmm8 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm8=int6464#15
# asm 2: pxor  <xmm15=%xmm15,<xmm8=%xmm14
pxor  %xmm15,%xmm14

# qhasm:       xmm12 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
movdqa %xmm8,%xmm10

# qhasm:       xmm12 ^= xmm10
# asm 1: pxor  <xmm10=int6464#10,<xmm12=int6464#11
# asm 2: pxor  <xmm10=%xmm9,<xmm12=%xmm10
pxor  %xmm9,%xmm10

# qhasm:       xmm11 &= xmm9
# asm 1: pand  <xmm9=int6464#13,<xmm11=int6464#9
# asm 2: pand  <xmm9=%xmm12,<xmm11=%xmm8
pand  %xmm12,%xmm8

# qhasm:       xmm14 = xmm8
# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
movdqa %xmm14,%xmm11

# qhasm:       xmm14 ^= xmm11
# asm 1: pxor  <xmm11=int6464#9,<xmm14=int6464#12
# asm 2: pxor  <xmm11=%xmm8,<xmm14=%xmm11
pxor  %xmm8,%xmm11

# qhasm:       xmm15 = xmm12
# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
movdqa %xmm10,%xmm13

# qhasm:       xmm15 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm15=int6464#14
# asm 2: pand  <xmm14=%xmm11,<xmm15=%xmm13
pand  %xmm11,%xmm13

# qhasm:       xmm15 ^= xmm10
# asm 1: pxor  <xmm10=int6464#10,<xmm15=int6464#14
# asm 2: pxor  <xmm10=%xmm9,<xmm15=%xmm13
pxor  %xmm9,%xmm13

# qhasm:       xmm13 = xmm9
# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
movdqa %xmm12,%xmm15

# qhasm:       xmm13 ^= xmm8
# asm 1: pxor  <xmm8=int6464#15,<xmm13=int6464#16
# asm 2: pxor  <xmm8=%xmm14,<xmm13=%xmm15
pxor  %xmm14,%xmm15

# qhasm:       xmm11 ^= xmm10
# asm 1: pxor  <xmm10=int6464#10,<xmm11=int6464#9
# asm 2: pxor  <xmm10=%xmm9,<xmm11=%xmm8
pxor  %xmm9,%xmm8

# qhasm:       xmm13 &= xmm11
# asm 1: pand  <xmm11=int6464#9,<xmm13=int6464#16
# asm 2: pand  <xmm11=%xmm8,<xmm13=%xmm15
pand  %xmm8,%xmm15

# qhasm:       xmm13 ^= xmm8
# asm 1: pxor  <xmm8=int6464#15,<xmm13=int6464#16
# asm 2: pxor  <xmm8=%xmm14,<xmm13=%xmm15
pxor  %xmm14,%xmm15

# qhasm:       xmm9 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm9=int6464#13
# asm 2: pxor  <xmm13=%xmm15,<xmm9=%xmm12
pxor  %xmm15,%xmm12

# qhasm:       xmm10 = xmm14
# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
movdqa %xmm11,%xmm8

# qhasm:       xmm10 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm10=int6464#9
# asm 2: pxor  <xmm13=%xmm15,<xmm10=%xmm8
pxor  %xmm15,%xmm8

# qhasm:       xmm10 &= xmm8
# asm 1: pand  <xmm8=int6464#15,<xmm10=int6464#9
# asm 2: pand  <xmm8=%xmm14,<xmm10=%xmm8
pand  %xmm14,%xmm8

# qhasm:       xmm9 ^= xmm10
# asm 1: pxor  <xmm10=int6464#9,<xmm9=int6464#13
# asm 2: pxor  <xmm10=%xmm8,<xmm9=%xmm12
pxor  %xmm8,%xmm12

# qhasm:       xmm14 ^= xmm10
# asm 1: pxor  <xmm10=int6464#9,<xmm14=int6464#12
# asm 2: pxor  <xmm10=%xmm8,<xmm14=%xmm11
pxor  %xmm8,%xmm11

# qhasm:       xmm14 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm14=int6464#12
# asm 2: pand  <xmm15=%xmm13,<xmm14=%xmm11
pand  %xmm13,%xmm11

# qhasm:       xmm14 ^= xmm12
# asm 1: pxor  <xmm12=int6464#11,<xmm14=int6464#12
# asm 2: pxor  <xmm12=%xmm10,<xmm14=%xmm11
pxor  %xmm10,%xmm11

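# The extra indentation in the qhasm comments below marks a nested subcircuit:
# plausibly the multiply-out stage of the tower-field inverter, where the
# factors computed above are combined with the input shares using only
# AND/OR/XOR before the S-box output linear layer.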
# qhasm:         xmm12 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
movdqa %xmm6,%xmm8

# qhasm:         xmm8 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
movdqa %xmm5,%xmm9

# qhasm:           xmm10 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm10 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm10=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm10=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm10 &= xmm6
# asm 1: pand  <xmm6=int6464#7,<xmm10=int6464#11
# asm 2: pand  <xmm6=%xmm6,<xmm10=%xmm10
pand  %xmm6,%xmm10

# qhasm:           xmm6 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm6=int6464#7
# asm 2: pxor  <xmm5=%xmm5,<xmm6=%xmm6
pxor  %xmm5,%xmm6

# qhasm:           xmm6 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm6=int6464#7
# asm 2: pand  <xmm14=%xmm11,<xmm6=%xmm6
pand  %xmm11,%xmm6

# qhasm:           xmm5 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm5=int6464#6
# asm 2: pand  <xmm15=%xmm13,<xmm5=%xmm5
pand  %xmm13,%xmm5

# qhasm:           xmm6 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm6=int6464#7
# asm 2: pxor  <xmm5=%xmm5,<xmm6=%xmm6
pxor  %xmm5,%xmm6

# qhasm:           xmm5 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm5=int6464#6
# asm 2: pxor  <xmm10=%xmm10,<xmm5=%xmm5
pxor  %xmm10,%xmm5

# qhasm:         xmm12 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#9
# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm8
pxor  %xmm0,%xmm8

# qhasm:         xmm8 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm8=int6464#10
# asm 2: pxor  <xmm3=%xmm3,<xmm8=%xmm9
pxor  %xmm3,%xmm9

# qhasm:         xmm15 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm15=int6464#14
# asm 2: pxor  <xmm13=%xmm15,<xmm15=%xmm13
pxor  %xmm15,%xmm13

# qhasm:         xmm14 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm14=int6464#12
# asm 2: pxor  <xmm9=%xmm12,<xmm14=%xmm11
pxor  %xmm12,%xmm11

# qhasm:           xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm11 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm11 &= xmm12
# asm 1: pand  <xmm12=int6464#9,<xmm11=int6464#11
# asm 2: pand  <xmm12=%xmm8,<xmm11=%xmm10
pand  %xmm8,%xmm10

# qhasm:           xmm12 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm12=int6464#9
# asm 2: pxor  <xmm8=%xmm9,<xmm12=%xmm8
pxor  %xmm9,%xmm8

# qhasm:           xmm12 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm12=int6464#9
# asm 2: pand  <xmm14=%xmm11,<xmm12=%xmm8
pand  %xmm11,%xmm8

# qhasm:           xmm8 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm8=int6464#10
# asm 2: pand  <xmm15=%xmm13,<xmm8=%xmm9
pand  %xmm13,%xmm9

# qhasm:           xmm8 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm8=int6464#10
# asm 2: pxor  <xmm12=%xmm8,<xmm8=%xmm9
pxor  %xmm8,%xmm9

# qhasm:           xmm12 ^= xmm11
# asm 1: pxor  <xmm11=int6464#11,<xmm12=int6464#9
# asm 2: pxor  <xmm11=%xmm10,<xmm12=%xmm8
pxor  %xmm10,%xmm8

# qhasm:           xmm10 = xmm13
# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
movdqa %xmm15,%xmm10

# qhasm:           xmm10 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm10=int6464#11
# asm 2: pxor  <xmm9=%xmm12,<xmm10=%xmm10
pxor  %xmm12,%xmm10

# qhasm:           xmm10 &= xmm0
# asm 1: pand  <xmm0=int6464#1,<xmm10=int6464#11
# asm 2: pand  <xmm0=%xmm0,<xmm10=%xmm10
pand  %xmm0,%xmm10

# qhasm:           xmm0 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm0=int6464#1
# asm 2: pxor  <xmm3=%xmm3,<xmm0=%xmm0
pxor  %xmm3,%xmm0

# qhasm:           xmm0 &= xmm9
# asm 1: pand  <xmm9=int6464#13,<xmm0=int6464#1
# asm 2: pand  <xmm9=%xmm12,<xmm0=%xmm0
pand  %xmm12,%xmm0

# qhasm:           xmm3 &= xmm13
# asm 1: pand  <xmm13=int6464#16,<xmm3=int6464#4
# asm 2: pand  <xmm13=%xmm15,<xmm3=%xmm3
pand  %xmm15,%xmm3

# qhasm:           xmm0 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm0=int6464#1
# asm 2: pxor  <xmm3=%xmm3,<xmm0=%xmm0
pxor  %xmm3,%xmm0

# qhasm:           xmm3 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm3=int6464#4
# asm 2: pxor  <xmm10=%xmm10,<xmm3=%xmm3
pxor  %xmm10,%xmm3

# qhasm:         xmm6 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm6=int6464#7
# asm 2: pxor  <xmm12=%xmm8,<xmm6=%xmm6
pxor  %xmm8,%xmm6

# qhasm:         xmm0 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm12=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

# qhasm:         xmm5 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm5=int6464#6
# asm 2: pxor  <xmm8=%xmm9,<xmm5=%xmm5
pxor  %xmm9,%xmm5

# qhasm:         xmm3 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm3=int6464#4
# asm 2: pxor  <xmm8=%xmm9,<xmm3=%xmm3
pxor  %xmm9,%xmm3

# qhasm:         xmm12 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
movdqa %xmm7,%xmm8

# qhasm:         xmm8 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
movdqa %xmm1,%xmm9

# qhasm:         xmm12 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm12=int6464#9
# asm 2: pxor  <xmm4=%xmm4,<xmm12=%xmm8
pxor  %xmm4,%xmm8

# qhasm:         xmm8 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm8=int6464#10
# asm 2: pxor  <xmm2=%xmm2,<xmm8=%xmm9
pxor  %xmm2,%xmm9

# qhasm:           xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm11 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm11 &= xmm12
# asm 1: pand  <xmm12=int6464#9,<xmm11=int6464#11
# asm 2: pand  <xmm12=%xmm8,<xmm11=%xmm10
pand  %xmm8,%xmm10

# qhasm:           xmm12 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm12=int6464#9
# asm 2: pxor  <xmm8=%xmm9,<xmm12=%xmm8
pxor  %xmm9,%xmm8

# qhasm:           xmm12 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm12=int6464#9
# asm 2: pand  <xmm14=%xmm11,<xmm12=%xmm8
pand  %xmm11,%xmm8

# qhasm:           xmm8 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm8=int6464#10
# asm 2: pand  <xmm15=%xmm13,<xmm8=%xmm9
pand  %xmm13,%xmm9

# qhasm:           xmm8 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm8=int6464#10
# asm 2: pxor  <xmm12=%xmm8,<xmm8=%xmm9
pxor  %xmm8,%xmm9

# qhasm:           xmm12 ^= xmm11
# asm 1: pxor  <xmm11=int6464#11,<xmm12=int6464#9
# asm 2: pxor  <xmm11=%xmm10,<xmm12=%xmm8
pxor  %xmm10,%xmm8

# qhasm:           xmm10 = xmm13
# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
movdqa %xmm15,%xmm10

# qhasm:           xmm10 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm10=int6464#11
# asm 2: pxor  <xmm9=%xmm12,<xmm10=%xmm10
pxor  %xmm12,%xmm10

# qhasm:           xmm10 &= xmm4
# asm 1: pand  <xmm4=int6464#5,<xmm10=int6464#11
# asm 2: pand  <xmm4=%xmm4,<xmm10=%xmm10
pand  %xmm4,%xmm10

# qhasm:           xmm4 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm4=int6464#5
# asm 2: pxor  <xmm2=%xmm2,<xmm4=%xmm4
pxor  %xmm2,%xmm4

# qhasm:           xmm4 &= xmm9
# asm 1: pand  <xmm9=int6464#13,<xmm4=int6464#5
# asm 2: pand  <xmm9=%xmm12,<xmm4=%xmm4
pand  %xmm12,%xmm4

# qhasm:           xmm2 &= xmm13
# asm 1: pand  <xmm13=int6464#16,<xmm2=int6464#3
# asm 2: pand  <xmm13=%xmm15,<xmm2=%xmm2
pand  %xmm15,%xmm2

# qhasm:           xmm4 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm4=int6464#5
# asm 2: pxor  <xmm2=%xmm2,<xmm4=%xmm4
pxor  %xmm2,%xmm4

# qhasm:           xmm2 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm2=int6464#3
# asm 2: pxor  <xmm10=%xmm10,<xmm2=%xmm2
pxor  %xmm10,%xmm2

# qhasm:         xmm15 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm15=int6464#14
# asm 2: pxor  <xmm13=%xmm15,<xmm15=%xmm13
pxor  %xmm15,%xmm13

# qhasm:         xmm14 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm14=int6464#12
# asm 2: pxor  <xmm9=%xmm12,<xmm14=%xmm11
pxor  %xmm12,%xmm11

# qhasm:           xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm11 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm11 &= xmm7
# asm 1: pand  <xmm7=int6464#8,<xmm11=int6464#11
# asm 2: pand  <xmm7=%xmm7,<xmm11=%xmm10
pand  %xmm7,%xmm10

# qhasm:           xmm7 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm7=int6464#8
# asm 2: pxor  <xmm1=%xmm1,<xmm7=%xmm7
pxor  %xmm1,%xmm7

# qhasm:           xmm7 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm7=int6464#8
# asm 2: pand  <xmm14=%xmm11,<xmm7=%xmm7
pand  %xmm11,%xmm7

# qhasm:           xmm1 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm1=int6464#2
# asm 2: pand  <xmm15=%xmm13,<xmm1=%xmm1
pand  %xmm13,%xmm1

# qhasm:           xmm7 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm7=int6464#8
# asm 2: pxor  <xmm1=%xmm1,<xmm7=%xmm7
pxor  %xmm1,%xmm7

# qhasm:           xmm1 ^= xmm11
# asm 1: pxor  <xmm11=int6464#11,<xmm1=int6464#2
# asm 2: pxor  <xmm11=%xmm10,<xmm1=%xmm1
pxor  %xmm10,%xmm1

# qhasm:         xmm7 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm7=int6464#8
# asm 2: pxor  <xmm12=%xmm8,<xmm7=%xmm7
pxor  %xmm8,%xmm7

# qhasm:         xmm4 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm4=int6464#5
# asm 2: pxor  <xmm12=%xmm8,<xmm4=%xmm4
pxor  %xmm8,%xmm4

# qhasm:         xmm1 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm1=int6464#2
# asm 2: pxor  <xmm8=%xmm9,<xmm1=%xmm1
pxor  %xmm9,%xmm1

# qhasm:         xmm2 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm2=int6464#3
# asm 2: pxor  <xmm8=%xmm9,<xmm2=%xmm2
pxor  %xmm9,%xmm2

# qhasm:       xmm7 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm7=int6464#8
# asm 2: pxor  <xmm0=%xmm0,<xmm7=%xmm7
pxor  %xmm0,%xmm7

# qhasm:       xmm1 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm1=int6464#2
# asm 2: pxor  <xmm6=%xmm6,<xmm1=%xmm1
pxor  %xmm6,%xmm1

# qhasm:       xmm4 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm4=int6464#5
# asm 2: pxor  <xmm7=%xmm7,<xmm4=%xmm4
pxor  %xmm7,%xmm4

# qhasm:       xmm6 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm6=int6464#7
# asm 2: pxor  <xmm0=%xmm0,<xmm6=%xmm6
pxor  %xmm0,%xmm6

# qhasm:       xmm0 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm0=int6464#1
# asm 2: pxor  <xmm1=%xmm1,<xmm0=%xmm0
pxor  %xmm1,%xmm0

# qhasm:       xmm1 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm1=int6464#2
# asm 2: pxor  <xmm5=%xmm5,<xmm1=%xmm1
pxor  %xmm5,%xmm1

# qhasm:       xmm5 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm5=int6464#6
# asm 2: pxor  <xmm2=%xmm2,<xmm5=%xmm5
pxor  %xmm2,%xmm5

# qhasm:       xmm4 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm4=int6464#5
# asm 2: pxor  <xmm5=%xmm5,<xmm4=%xmm4
pxor  %xmm5,%xmm4

# qhasm:       xmm2 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm2=int6464#3
# asm 2: pxor  <xmm3=%xmm3,<xmm2=%xmm2
pxor  %xmm3,%xmm2

# qhasm:       xmm3 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm3=int6464#4
# asm 2: pxor  <xmm5=%xmm5,<xmm3=%xmm3
pxor  %xmm5,%xmm3

# qhasm:       xmm6 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm6=int6464#7
# asm 2: pxor  <xmm3=%xmm3,<xmm6=%xmm6
pxor  %xmm3,%xmm6

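# MixColumns. In this bitsliced packing each 32-bit word of a slice register
# carries one row of the (already row-shifted) state, so MixColumns reduces
# to XORs plus rotations of the four dwords: pshufd $0x93 rotates by one
# position (dest dwords 0..3 = src dwords 3,0,1,2) and pshufd $0x4E by two
# (swapping the 64-bit halves). The same pattern is applied to all eight
# slice registers.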
# qhasm:     xmm8 = shuffle dwords of xmm0 by 0x93
# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
pshufd $0x93,%xmm0,%xmm8

# qhasm:     xmm9 = shuffle dwords of xmm1 by 0x93
# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
pshufd $0x93,%xmm1,%xmm9

# qhasm:     xmm10 = shuffle dwords of xmm4 by 0x93
# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
pshufd $0x93,%xmm4,%xmm10

# qhasm:     xmm11 = shuffle dwords of xmm6 by 0x93
# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
pshufd $0x93,%xmm6,%xmm11

# qhasm:     xmm12 = shuffle dwords of xmm3 by 0x93
# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
pshufd $0x93,%xmm3,%xmm12

# qhasm:     xmm13 = shuffle dwords of xmm7 by 0x93
# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
pshufd $0x93,%xmm7,%xmm13

# qhasm:     xmm14 = shuffle dwords of xmm2 by 0x93
# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
pshufd $0x93,%xmm2,%xmm14

# qhasm:     xmm15 = shuffle dwords of xmm5 by 0x93
# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
pshufd $0x93,%xmm5,%xmm15

# qhasm:     xmm0 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm8=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

# qhasm:     xmm1 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm1=int6464#2
# asm 2: pxor  <xmm9=%xmm9,<xmm1=%xmm1
pxor  %xmm9,%xmm1

# qhasm:     xmm4 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm4=int6464#5
# asm 2: pxor  <xmm10=%xmm10,<xmm4=%xmm4
pxor  %xmm10,%xmm4

# qhasm:     xmm6 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm6=int6464#7
# asm 2: pxor  <xmm11=%xmm11,<xmm6=%xmm6
pxor  %xmm11,%xmm6

# qhasm:     xmm3 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#4
# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm3
pxor  %xmm12,%xmm3

# qhasm:     xmm7 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm7=int6464#8
# asm 2: pxor  <xmm13=%xmm13,<xmm7=%xmm7
pxor  %xmm13,%xmm7

# qhasm:     xmm2 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm2=int6464#3
# asm 2: pxor  <xmm14=%xmm14,<xmm2=%xmm2
pxor  %xmm14,%xmm2

# qhasm:     xmm5 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm5=int6464#6
# asm 2: pxor  <xmm15=%xmm15,<xmm5=%xmm5
pxor  %xmm15,%xmm5

# qhasm:     xmm8 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm8=int6464#9
# asm 2: pxor  <xmm5=%xmm5,<xmm8=%xmm8
pxor  %xmm5,%xmm8

# qhasm:     xmm9 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm9=int6464#10
# asm 2: pxor  <xmm0=%xmm0,<xmm9=%xmm9
pxor  %xmm0,%xmm9

# qhasm:     xmm10 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm10=int6464#11
# asm 2: pxor  <xmm1=%xmm1,<xmm10=%xmm10
pxor  %xmm1,%xmm10

# qhasm:     xmm9 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm9=int6464#10
# asm 2: pxor  <xmm5=%xmm5,<xmm9=%xmm9
pxor  %xmm5,%xmm9

# qhasm:     xmm11 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#12
# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm11
pxor  %xmm4,%xmm11

# qhasm:     xmm12 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm12=int6464#13
# asm 2: pxor  <xmm6=%xmm6,<xmm12=%xmm12
pxor  %xmm6,%xmm12

# qhasm:     xmm13 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm13=int6464#14
# asm 2: pxor  <xmm3=%xmm3,<xmm13=%xmm13
pxor  %xmm3,%xmm13

# qhasm:     xmm11 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm11=int6464#12
# asm 2: pxor  <xmm5=%xmm5,<xmm11=%xmm11
pxor  %xmm5,%xmm11

# qhasm:     xmm14 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm14=int6464#15
# asm 2: pxor  <xmm7=%xmm7,<xmm14=%xmm14
pxor  %xmm7,%xmm14

# qhasm:     xmm15 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm15=int6464#16
# asm 2: pxor  <xmm2=%xmm2,<xmm15=%xmm15
pxor  %xmm2,%xmm15

# qhasm:     xmm12 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm12=int6464#13
# asm 2: pxor  <xmm5=%xmm5,<xmm12=%xmm12
pxor  %xmm5,%xmm12

# qhasm:     xmm0 = shuffle dwords of xmm0 by 0x4E
# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
pshufd $0x4E,%xmm0,%xmm0

# qhasm:     xmm1 = shuffle dwords of xmm1 by 0x4E
# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
pshufd $0x4E,%xmm1,%xmm1

# qhasm:     xmm4 = shuffle dwords of xmm4 by 0x4E
# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
pshufd $0x4E,%xmm4,%xmm4

# qhasm:     xmm6 = shuffle dwords of xmm6 by 0x4E
# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
pshufd $0x4E,%xmm6,%xmm6

# qhasm:     xmm3 = shuffle dwords of xmm3 by 0x4E
# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
pshufd $0x4E,%xmm3,%xmm3

# qhasm:     xmm7 = shuffle dwords of xmm7 by 0x4E
# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
pshufd $0x4E,%xmm7,%xmm7

# qhasm:     xmm2 = shuffle dwords of xmm2 by 0x4E
# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
pshufd $0x4E,%xmm2,%xmm2

# qhasm:     xmm5 = shuffle dwords of xmm5 by 0x4E
# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
pshufd $0x4E,%xmm5,%xmm5

# qhasm:     xmm8 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm8=int6464#9
# asm 2: pxor  <xmm0=%xmm0,<xmm8=%xmm8
pxor  %xmm0,%xmm8

# qhasm:     xmm9 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm9=int6464#10
# asm 2: pxor  <xmm1=%xmm1,<xmm9=%xmm9
pxor  %xmm1,%xmm9

# qhasm:     xmm10 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm10=int6464#11
# asm 2: pxor  <xmm4=%xmm4,<xmm10=%xmm10
pxor  %xmm4,%xmm10

# qhasm:     xmm11 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm11=int6464#12
# asm 2: pxor  <xmm6=%xmm6,<xmm11=%xmm11
pxor  %xmm6,%xmm11

# qhasm:     xmm12 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm12=int6464#13
# asm 2: pxor  <xmm3=%xmm3,<xmm12=%xmm12
pxor  %xmm3,%xmm12

# qhasm:     xmm13 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm13=int6464#14
# asm 2: pxor  <xmm7=%xmm7,<xmm13=%xmm13
pxor  %xmm7,%xmm13

# qhasm:     xmm14 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm14=int6464#15
# asm 2: pxor  <xmm2=%xmm2,<xmm14=%xmm14
pxor  %xmm2,%xmm14

# qhasm:     xmm15 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm15=int6464#16
# asm 2: pxor  <xmm5=%xmm5,<xmm15=%xmm15
pxor  %xmm5,%xmm15

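# AddRoundKey and ShiftRows for the next round. Each bitsliced round key
# occupies 8 slices x 16 bytes = 128 bytes in the precomputed schedule at c
# (%rcx), so offset 640 = 5*128 should be round-key block 5 (counting from 0),
# with its eight slices at offsets 640, 656, ..., 752. ShiftRows permutes
# bytes identically in every slice, so a single pshufb with the SR constant
# applies it to each register.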
# qhasm:     xmm8 ^= *(int128 *)(c + 640)
# asm 1: pxor 640(<c=int64#4),<xmm8=int6464#9
# asm 2: pxor 640(<c=%rcx),<xmm8=%xmm8
pxor 640(%rcx),%xmm8

# qhasm:     shuffle bytes of xmm8 by SR
# asm 1: pshufb SR,<xmm8=int6464#9
# asm 2: pshufb SR,<xmm8=%xmm8
pshufb SR,%xmm8

# qhasm:     xmm9 ^= *(int128 *)(c + 656)
# asm 1: pxor 656(<c=int64#4),<xmm9=int6464#10
# asm 2: pxor 656(<c=%rcx),<xmm9=%xmm9
pxor 656(%rcx),%xmm9

# qhasm:     shuffle bytes of xmm9 by SR
# asm 1: pshufb SR,<xmm9=int6464#10
# asm 2: pshufb SR,<xmm9=%xmm9
pshufb SR,%xmm9

# qhasm:     xmm10 ^= *(int128 *)(c + 672)
# asm 1: pxor 672(<c=int64#4),<xmm10=int6464#11
# asm 2: pxor 672(<c=%rcx),<xmm10=%xmm10
pxor 672(%rcx),%xmm10

# qhasm:     shuffle bytes of xmm10 by SR
# asm 1: pshufb SR,<xmm10=int6464#11
# asm 2: pshufb SR,<xmm10=%xmm10
pshufb SR,%xmm10

# qhasm:     xmm11 ^= *(int128 *)(c + 688)
# asm 1: pxor 688(<c=int64#4),<xmm11=int6464#12
# asm 2: pxor 688(<c=%rcx),<xmm11=%xmm11
pxor 688(%rcx),%xmm11

# qhasm:     shuffle bytes of xmm11 by SR
# asm 1: pshufb SR,<xmm11=int6464#12
# asm 2: pshufb SR,<xmm11=%xmm11
pshufb SR,%xmm11

# qhasm:     xmm12 ^= *(int128 *)(c + 704)
# asm 1: pxor 704(<c=int64#4),<xmm12=int6464#13
# asm 2: pxor 704(<c=%rcx),<xmm12=%xmm12
pxor 704(%rcx),%xmm12

# qhasm:     shuffle bytes of xmm12 by SR
# asm 1: pshufb SR,<xmm12=int6464#13
# asm 2: pshufb SR,<xmm12=%xmm12
pshufb SR,%xmm12

# qhasm:     xmm13 ^= *(int128 *)(c + 720)
# asm 1: pxor 720(<c=int64#4),<xmm13=int6464#14
# asm 2: pxor 720(<c=%rcx),<xmm13=%xmm13
pxor 720(%rcx),%xmm13

# qhasm:     shuffle bytes of xmm13 by SR
# asm 1: pshufb SR,<xmm13=int6464#14
# asm 2: pshufb SR,<xmm13=%xmm13
pshufb SR,%xmm13

# qhasm:     xmm14 ^= *(int128 *)(c + 736)
# asm 1: pxor 736(<c=int64#4),<xmm14=int6464#15
# asm 2: pxor 736(<c=%rcx),<xmm14=%xmm14
pxor 736(%rcx),%xmm14

# qhasm:     shuffle bytes of xmm14 by SR
# asm 1: pshufb SR,<xmm14=int6464#15
# asm 2: pshufb SR,<xmm14=%xmm14
pshufb SR,%xmm14

# qhasm:     xmm15 ^= *(int128 *)(c + 752)
# asm 1: pxor 752(<c=int64#4),<xmm15=int6464#16
# asm 2: pxor 752(<c=%rcx),<xmm15=%xmm15
pxor 752(%rcx),%xmm15

# qhasm:     shuffle bytes of xmm15 by SR
# asm 1: pshufb SR,<xmm15=int6464#16
# asm 2: pshufb SR,<xmm15=%xmm15
pshufb SR,%xmm15

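# Start of the next S-box evaluation: the XOR-only block below is the input
# linear layer (basis change) of the S-box circuit, mirroring the structure
# of the round above.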
# qhasm:       xmm13 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm13=int6464#14
# asm 2: pxor  <xmm14=%xmm14,<xmm13=%xmm13
pxor  %xmm14,%xmm13

# qhasm:       xmm10 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm10=int6464#11
# asm 2: pxor  <xmm9=%xmm9,<xmm10=%xmm10
pxor  %xmm9,%xmm10

# qhasm:       xmm13 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm13=int6464#14
# asm 2: pxor  <xmm8=%xmm8,<xmm13=%xmm13
pxor  %xmm8,%xmm13

# qhasm:       xmm14 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm14=int6464#15
# asm 2: pxor  <xmm10=%xmm10,<xmm14=%xmm14
pxor  %xmm10,%xmm14

# qhasm:       xmm11 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm11=int6464#12
# asm 2: pxor  <xmm8=%xmm8,<xmm11=%xmm11
pxor  %xmm8,%xmm11

# qhasm:       xmm14 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm14=int6464#15
# asm 2: pxor  <xmm11=%xmm11,<xmm14=%xmm14
pxor  %xmm11,%xmm14

# qhasm:       xmm11 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm11=int6464#12
# asm 2: pxor  <xmm15=%xmm15,<xmm11=%xmm11
pxor  %xmm15,%xmm11

# qhasm:       xmm11 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm11=int6464#12
# asm 2: pxor  <xmm12=%xmm12,<xmm11=%xmm11
pxor  %xmm12,%xmm11

# qhasm:       xmm15 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm15=int6464#16
# asm 2: pxor  <xmm13=%xmm13,<xmm15=%xmm15
pxor  %xmm13,%xmm15

# qhasm:       xmm11 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm11=int6464#12
# asm 2: pxor  <xmm9=%xmm9,<xmm11=%xmm11
pxor  %xmm9,%xmm11

# qhasm:       xmm12 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm12=int6464#13
# asm 2: pxor  <xmm13=%xmm13,<xmm12=%xmm12
pxor  %xmm13,%xmm12

# qhasm:       xmm10 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm10=int6464#11
# asm 2: pxor  <xmm15=%xmm15,<xmm10=%xmm10
pxor  %xmm15,%xmm10

# qhasm:       xmm9 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm9=int6464#10
# asm 2: pxor  <xmm13=%xmm13,<xmm9=%xmm9
pxor  %xmm13,%xmm9

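# The circuit now builds temporaries: the qhasm variables written here are
# allocated to %xmm0..%xmm7 (see the "asm 2" lines), while the state slices
# being substituted remain in %xmm8..%xmm15.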
# qhasm:       xmm3 = xmm15
# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
movdqa %xmm15,%xmm0

# qhasm:       xmm2 = xmm9
# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
movdqa %xmm9,%xmm1

# qhasm:       xmm1 = xmm13
# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
movdqa %xmm13,%xmm2

# qhasm:       xmm5 = xmm10
# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
movdqa %xmm10,%xmm3

# qhasm:       xmm4 = xmm14
# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
movdqa %xmm14,%xmm4

# qhasm:       xmm3 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#1
# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm0
pxor  %xmm12,%xmm0

# qhasm:       xmm2 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm2=int6464#2
# asm 2: pxor  <xmm10=%xmm10,<xmm2=%xmm1
pxor  %xmm10,%xmm1

# qhasm:       xmm1 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm1=int6464#3
# asm 2: pxor  <xmm11=%xmm11,<xmm1=%xmm2
pxor  %xmm11,%xmm2

# qhasm:       xmm5 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm5=int6464#4
# asm 2: pxor  <xmm12=%xmm12,<xmm5=%xmm3
pxor  %xmm12,%xmm3

# qhasm:       xmm4 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#5
# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm4
pxor  %xmm8,%xmm4

# qhasm:       xmm6 = xmm3
# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
movdqa %xmm0,%xmm5

# qhasm:       xmm0 = xmm2
# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
movdqa %xmm1,%xmm6

# qhasm:       xmm7 = xmm3
# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
movdqa %xmm0,%xmm7

# qhasm:       xmm2 |= xmm1
# asm 1: por   <xmm1=int6464#3,<xmm2=int6464#2
# asm 2: por   <xmm1=%xmm2,<xmm2=%xmm1
por   %xmm2,%xmm1

# qhasm:       xmm3 |= xmm4
# asm 1: por   <xmm4=int6464#5,<xmm3=int6464#1
# asm 2: por   <xmm4=%xmm4,<xmm3=%xmm0
por   %xmm4,%xmm0

# qhasm:       xmm7 ^= xmm0
# asm 1: pxor  <xmm0=int6464#7,<xmm7=int6464#8
# asm 2: pxor  <xmm0=%xmm6,<xmm7=%xmm7
pxor  %xmm6,%xmm7

# qhasm:       xmm6 &= xmm4
# asm 1: pand  <xmm4=int6464#5,<xmm6=int6464#6
# asm 2: pand  <xmm4=%xmm4,<xmm6=%xmm5
pand  %xmm4,%xmm5

# qhasm:       xmm0 &= xmm1
# asm 1: pand  <xmm1=int6464#3,<xmm0=int6464#7
# asm 2: pand  <xmm1=%xmm2,<xmm0=%xmm6
pand  %xmm2,%xmm6

# qhasm:       xmm4 ^= xmm1
# asm 1: pxor  <xmm1=int6464#3,<xmm4=int6464#5
# asm 2: pxor  <xmm1=%xmm2,<xmm4=%xmm4
pxor  %xmm2,%xmm4

# qhasm:       xmm7 &= xmm4
# asm 1: pand  <xmm4=int6464#5,<xmm7=int6464#8
# asm 2: pand  <xmm4=%xmm4,<xmm7=%xmm7
pand  %xmm4,%xmm7

# qhasm:       xmm4 = xmm11
# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
movdqa %xmm11,%xmm2

# qhasm:       xmm4 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#3
# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm2
pxor  %xmm8,%xmm2

# qhasm:       xmm5 &= xmm4
# asm 1: pand  <xmm4=int6464#3,<xmm5=int6464#4
# asm 2: pand  <xmm4=%xmm2,<xmm5=%xmm3
pand  %xmm2,%xmm3

# qhasm:       xmm3 ^= xmm5
# asm 1: pxor  <xmm5=int6464#4,<xmm3=int6464#1
# asm 2: pxor  <xmm5=%xmm3,<xmm3=%xmm0
pxor  %xmm3,%xmm0

# qhasm:       xmm2 ^= xmm5
# asm 1: pxor  <xmm5=int6464#4,<xmm2=int6464#2
# asm 2: pxor  <xmm5=%xmm3,<xmm2=%xmm1
pxor  %xmm3,%xmm1

# qhasm:       xmm5 = xmm15
# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
movdqa %xmm15,%xmm2

# qhasm:       xmm5 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm5=int6464#3
# asm 2: pxor  <xmm9=%xmm9,<xmm5=%xmm2
pxor  %xmm9,%xmm2

# qhasm:       xmm4 = xmm13
# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
movdqa %xmm13,%xmm3

# qhasm:       xmm1 = xmm5
# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
movdqa %xmm2,%xmm4

# qhasm:       xmm4 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm4=int6464#4
# asm 2: pxor  <xmm14=%xmm14,<xmm4=%xmm3
pxor  %xmm14,%xmm3

# qhasm:       xmm1 |= xmm4
# asm 1: por   <xmm4=int6464#4,<xmm1=int6464#5
# asm 2: por   <xmm4=%xmm3,<xmm1=%xmm4
por   %xmm3,%xmm4

# qhasm:       xmm5 &= xmm4
# asm 1: pand  <xmm4=int6464#4,<xmm5=int6464#3
# asm 2: pand  <xmm4=%xmm3,<xmm5=%xmm2
pand  %xmm3,%xmm2

# qhasm:       xmm0 ^= xmm5
# asm 1: pxor  <xmm5=int6464#3,<xmm0=int6464#7
# asm 2: pxor  <xmm5=%xmm2,<xmm0=%xmm6
pxor  %xmm2,%xmm6

# qhasm:       xmm3 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm3=int6464#1
# asm 2: pxor  <xmm7=%xmm7,<xmm3=%xmm0
pxor  %xmm7,%xmm0

# qhasm:       xmm2 ^= xmm6
# asm 1: pxor  <xmm6=int6464#6,<xmm2=int6464#2
# asm 2: pxor  <xmm6=%xmm5,<xmm2=%xmm1
pxor  %xmm5,%xmm1

# qhasm:       xmm1 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm1=int6464#5
# asm 2: pxor  <xmm7=%xmm7,<xmm1=%xmm4
pxor  %xmm7,%xmm4

# qhasm:       xmm0 ^= xmm6
# asm 1: pxor  <xmm6=int6464#6,<xmm0=int6464#7
# asm 2: pxor  <xmm6=%xmm5,<xmm0=%xmm6
pxor  %xmm5,%xmm6

# qhasm:       xmm1 ^= xmm6
# asm 1: pxor  <xmm6=int6464#6,<xmm1=int6464#5
# asm 2: pxor  <xmm6=%xmm5,<xmm1=%xmm4
pxor  %xmm5,%xmm4

# qhasm:       xmm4 = xmm10
# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
movdqa %xmm10,%xmm2

# qhasm:       xmm5 = xmm12
# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
movdqa %xmm12,%xmm3

# qhasm:       xmm6 = xmm9
# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
movdqa %xmm9,%xmm5

# qhasm:       xmm7 = xmm15
# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
movdqa %xmm15,%xmm7

# qhasm:       xmm4 &= xmm11
# asm 1: pand  <xmm11=int6464#12,<xmm4=int6464#3
# asm 2: pand  <xmm11=%xmm11,<xmm4=%xmm2
pand  %xmm11,%xmm2

# qhasm:       xmm5 &= xmm8
# asm 1: pand  <xmm8=int6464#9,<xmm5=int6464#4
# asm 2: pand  <xmm8=%xmm8,<xmm5=%xmm3
pand  %xmm8,%xmm3

# qhasm:       xmm6 &= xmm13
# asm 1: pand  <xmm13=int6464#14,<xmm6=int6464#6
# asm 2: pand  <xmm13=%xmm13,<xmm6=%xmm5
pand  %xmm13,%xmm5

# qhasm:       xmm7 |= xmm14
# asm 1: por   <xmm14=int6464#15,<xmm7=int6464#8
# asm 2: por   <xmm14=%xmm14,<xmm7=%xmm7
por   %xmm14,%xmm7

# qhasm:       xmm3 ^= xmm4
# asm 1: pxor  <xmm4=int6464#3,<xmm3=int6464#1
# asm 2: pxor  <xmm4=%xmm2,<xmm3=%xmm0
pxor  %xmm2,%xmm0

# qhasm:       xmm2 ^= xmm5
# asm 1: pxor  <xmm5=int6464#4,<xmm2=int6464#2
# asm 2: pxor  <xmm5=%xmm3,<xmm2=%xmm1
pxor  %xmm3,%xmm1

# qhasm:       xmm1 ^= xmm6
# asm 1: pxor  <xmm6=int6464#6,<xmm1=int6464#5
# asm 2: pxor  <xmm6=%xmm5,<xmm1=%xmm4
pxor  %xmm5,%xmm4

# qhasm:       xmm0 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm0=int6464#7
# asm 2: pxor  <xmm7=%xmm7,<xmm0=%xmm6
pxor  %xmm7,%xmm6

# qhasm:       xmm4 = xmm3
# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
movdqa %xmm0,%xmm2

# qhasm:       xmm4 ^= xmm2
# asm 1: pxor  <xmm2=int6464#2,<xmm4=int6464#3
# asm 2: pxor  <xmm2=%xmm1,<xmm4=%xmm2
pxor  %xmm1,%xmm2

# qhasm:       xmm3 &= xmm1
# asm 1: pand  <xmm1=int6464#5,<xmm3=int6464#1
# asm 2: pand  <xmm1=%xmm4,<xmm3=%xmm0
pand  %xmm4,%xmm0

# qhasm:       xmm6 = xmm0
# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
movdqa %xmm6,%xmm3

# qhasm:       xmm6 ^= xmm3
# asm 1: pxor  <xmm3=int6464#1,<xmm6=int6464#4
# asm 2: pxor  <xmm3=%xmm0,<xmm6=%xmm3
pxor  %xmm0,%xmm3

# qhasm:       xmm7 = xmm4
# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
movdqa %xmm2,%xmm5

# qhasm:       xmm7 &= xmm6
# asm 1: pand  <xmm6=int6464#4,<xmm7=int6464#6
# asm 2: pand  <xmm6=%xmm3,<xmm7=%xmm5
pand  %xmm3,%xmm5

# qhasm:       xmm7 ^= xmm2
# asm 1: pxor  <xmm2=int6464#2,<xmm7=int6464#6
# asm 2: pxor  <xmm2=%xmm1,<xmm7=%xmm5
pxor  %xmm1,%xmm5

# qhasm:       xmm5 = xmm1
# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
movdqa %xmm4,%xmm7

# qhasm:       xmm5 ^= xmm0
# asm 1: pxor  <xmm0=int6464#7,<xmm5=int6464#8
# asm 2: pxor  <xmm0=%xmm6,<xmm5=%xmm7
pxor  %xmm6,%xmm7

# qhasm:       xmm3 ^= xmm2
# asm 1: pxor  <xmm2=int6464#2,<xmm3=int6464#1
# asm 2: pxor  <xmm2=%xmm1,<xmm3=%xmm0
pxor  %xmm1,%xmm0

# qhasm:       xmm5 &= xmm3
# asm 1: pand  <xmm3=int6464#1,<xmm5=int6464#8
# asm 2: pand  <xmm3=%xmm0,<xmm5=%xmm7
pand  %xmm0,%xmm7

# qhasm:       xmm5 ^= xmm0
# asm 1: pxor  <xmm0=int6464#7,<xmm5=int6464#8
# asm 2: pxor  <xmm0=%xmm6,<xmm5=%xmm7
pxor  %xmm6,%xmm7

# qhasm:       xmm1 ^= xmm5
# asm 1: pxor  <xmm5=int6464#8,<xmm1=int6464#5
# asm 2: pxor  <xmm5=%xmm7,<xmm1=%xmm4
pxor  %xmm7,%xmm4

# qhasm:       xmm2 = xmm6
# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
movdqa %xmm3,%xmm0

# qhasm:       xmm2 ^= xmm5
# asm 1: pxor  <xmm5=int6464#8,<xmm2=int6464#1
# asm 2: pxor  <xmm5=%xmm7,<xmm2=%xmm0
pxor  %xmm7,%xmm0

# qhasm:       xmm2 &= xmm0
# asm 1: pand  <xmm0=int6464#7,<xmm2=int6464#1
# asm 2: pand  <xmm0=%xmm6,<xmm2=%xmm0
pand  %xmm6,%xmm0

# qhasm:       xmm1 ^= xmm2
# asm 1: pxor  <xmm2=int6464#1,<xmm1=int6464#5
# asm 2: pxor  <xmm2=%xmm0,<xmm1=%xmm4
pxor  %xmm0,%xmm4

# qhasm:       xmm6 ^= xmm2
# asm 1: pxor  <xmm2=int6464#1,<xmm6=int6464#4
# asm 2: pxor  <xmm2=%xmm0,<xmm6=%xmm3
pxor  %xmm0,%xmm3

# qhasm:       xmm6 &= xmm7
# asm 1: pand  <xmm7=int6464#6,<xmm6=int6464#4
# asm 2: pand  <xmm7=%xmm5,<xmm6=%xmm3
pand  %xmm5,%xmm3

# qhasm:       xmm6 ^= xmm4
# asm 1: pxor  <xmm4=int6464#3,<xmm6=int6464#4
# asm 2: pxor  <xmm4=%xmm2,<xmm6=%xmm3
pxor  %xmm2,%xmm3

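# Deeper-nested subcircuit again, plausibly the multiply-out stage of this
# round's inverter: partial products in the low registers are ANDed/XORed
# against the state slices in %xmm8..%xmm15 to produce the S-box outputs.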
# qhasm:         xmm4 = xmm14
# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
movdqa %xmm14,%xmm0

# qhasm:         xmm0 = xmm13
# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
movdqa %xmm13,%xmm1

# qhasm:           xmm2 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
movdqa %xmm5,%xmm2

# qhasm:           xmm2 ^= xmm6
# asm 1: pxor  <xmm6=int6464#4,<xmm2=int6464#3
# asm 2: pxor  <xmm6=%xmm3,<xmm2=%xmm2
pxor  %xmm3,%xmm2

# qhasm:           xmm2 &= xmm14
# asm 1: pand  <xmm14=int6464#15,<xmm2=int6464#3
# asm 2: pand  <xmm14=%xmm14,<xmm2=%xmm2
pand  %xmm14,%xmm2

# qhasm:           xmm14 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm14=int6464#15
# asm 2: pxor  <xmm13=%xmm13,<xmm14=%xmm14
pxor  %xmm13,%xmm14

# qhasm:           xmm14 &= xmm6
# asm 1: pand  <xmm6=int6464#4,<xmm14=int6464#15
# asm 2: pand  <xmm6=%xmm3,<xmm14=%xmm14
pand  %xmm3,%xmm14

# qhasm:           xmm13 &= xmm7
# asm 1: pand  <xmm7=int6464#6,<xmm13=int6464#14
# asm 2: pand  <xmm7=%xmm5,<xmm13=%xmm13
pand  %xmm5,%xmm13

# qhasm:           xmm14 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm14=int6464#15
# asm 2: pxor  <xmm13=%xmm13,<xmm14=%xmm14
pxor  %xmm13,%xmm14

# qhasm:           xmm13 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm13=int6464#14
# asm 2: pxor  <xmm2=%xmm2,<xmm13=%xmm13
pxor  %xmm2,%xmm13

# qhasm:         xmm4 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#1
# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm0
pxor  %xmm8,%xmm0

# qhasm:         xmm0 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm0=int6464#2
# asm 2: pxor  <xmm11=%xmm11,<xmm0=%xmm1
pxor  %xmm11,%xmm1

# qhasm:         xmm7 ^= xmm5
# asm 1: pxor  <xmm5=int6464#8,<xmm7=int6464#6
# asm 2: pxor  <xmm5=%xmm7,<xmm7=%xmm5
pxor  %xmm7,%xmm5

# qhasm:         xmm6 ^= xmm1
# asm 1: pxor  <xmm1=int6464#5,<xmm6=int6464#4
# asm 2: pxor  <xmm1=%xmm4,<xmm6=%xmm3
pxor  %xmm4,%xmm3

# qhasm:           xmm3 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
movdqa %xmm5,%xmm2

# qhasm:           xmm3 ^= xmm6
# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
pxor  %xmm3,%xmm2

# qhasm:           xmm3 &= xmm4
# asm 1: pand  <xmm4=int6464#1,<xmm3=int6464#3
# asm 2: pand  <xmm4=%xmm0,<xmm3=%xmm2
pand  %xmm0,%xmm2

# qhasm:           xmm4 ^= xmm0
# asm 1: pxor  <xmm0=int6464#2,<xmm4=int6464#1
# asm 2: pxor  <xmm0=%xmm1,<xmm4=%xmm0
pxor  %xmm1,%xmm0

# qhasm:           xmm4 &= xmm6
# asm 1: pand  <xmm6=int6464#4,<xmm4=int6464#1
# asm 2: pand  <xmm6=%xmm3,<xmm4=%xmm0
pand  %xmm3,%xmm0

# qhasm:           xmm0 &= xmm7
# asm 1: pand  <xmm7=int6464#6,<xmm0=int6464#2
# asm 2: pand  <xmm7=%xmm5,<xmm0=%xmm1
pand  %xmm5,%xmm1

# qhasm:           xmm0 ^= xmm4
# asm 1: pxor  <xmm4=int6464#1,<xmm0=int6464#2
# asm 2: pxor  <xmm4=%xmm0,<xmm0=%xmm1
pxor  %xmm0,%xmm1

# qhasm:           xmm4 ^= xmm3
# asm 1: pxor  <xmm3=int6464#3,<xmm4=int6464#1
# asm 2: pxor  <xmm3=%xmm2,<xmm4=%xmm0
pxor  %xmm2,%xmm0

# qhasm:           xmm2 = xmm5
# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
movdqa %xmm7,%xmm2

# qhasm:           xmm2 ^= xmm1
# asm 1: pxor  <xmm1=int6464#5,<xmm2=int6464#3
# asm 2: pxor  <xmm1=%xmm4,<xmm2=%xmm2
pxor  %xmm4,%xmm2

# qhasm:           xmm2 &= xmm8
# asm 1: pand  <xmm8=int6464#9,<xmm2=int6464#3
# asm 2: pand  <xmm8=%xmm8,<xmm2=%xmm2
pand  %xmm8,%xmm2

# qhasm:           xmm8 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm8=int6464#9
# asm 2: pxor  <xmm11=%xmm11,<xmm8=%xmm8
pxor  %xmm11,%xmm8

# qhasm:           xmm8 &= xmm1
# asm 1: pand  <xmm1=int6464#5,<xmm8=int6464#9
# asm 2: pand  <xmm1=%xmm4,<xmm8=%xmm8
pand  %xmm4,%xmm8

# qhasm:           xmm11 &= xmm5
# asm 1: pand  <xmm5=int6464#8,<xmm11=int6464#12
# asm 2: pand  <xmm5=%xmm7,<xmm11=%xmm11
pand  %xmm7,%xmm11

# qhasm:           xmm8 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm8=int6464#9
# asm 2: pxor  <xmm11=%xmm11,<xmm8=%xmm8
pxor  %xmm11,%xmm8

# qhasm:           xmm11 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm11=int6464#12
# asm 2: pxor  <xmm2=%xmm2,<xmm11=%xmm11
pxor  %xmm2,%xmm11

# qhasm:         xmm14 ^= xmm4
# asm 1: pxor  <xmm4=int6464#1,<xmm14=int6464#15
# asm 2: pxor  <xmm4=%xmm0,<xmm14=%xmm14
pxor  %xmm0,%xmm14

# qhasm:         xmm8 ^= xmm4
# asm 1: pxor  <xmm4=int6464#1,<xmm8=int6464#9
# asm 2: pxor  <xmm4=%xmm0,<xmm8=%xmm8
pxor  %xmm0,%xmm8

# qhasm:         xmm13 ^= xmm0
# asm 1: pxor  <xmm0=int6464#2,<xmm13=int6464#14
# asm 2: pxor  <xmm0=%xmm1,<xmm13=%xmm13
pxor  %xmm1,%xmm13

# qhasm:         xmm11 ^= xmm0
# asm 1: pxor  <xmm0=int6464#2,<xmm11=int6464#12
# asm 2: pxor  <xmm0=%xmm1,<xmm11=%xmm11
pxor  %xmm1,%xmm11

# qhasm:         xmm4 = xmm15
# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
movdqa %xmm15,%xmm0

# qhasm:         xmm0 = xmm9
# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
movdqa %xmm9,%xmm1

# qhasm:         xmm4 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm4=int6464#1
# asm 2: pxor  <xmm12=%xmm12,<xmm4=%xmm0
pxor  %xmm12,%xmm0

# qhasm:         xmm0 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm0=int6464#2
# asm 2: pxor  <xmm10=%xmm10,<xmm0=%xmm1
pxor  %xmm10,%xmm1

# qhasm:           xmm3 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
movdqa %xmm5,%xmm2

# qhasm:           xmm3 ^= xmm6
# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
pxor  %xmm3,%xmm2

# qhasm:           xmm3 &= xmm4
# asm 1: pand  <xmm4=int6464#1,<xmm3=int6464#3
# asm 2: pand  <xmm4=%xmm0,<xmm3=%xmm2
pand  %xmm0,%xmm2

# qhasm:           xmm4 ^= xmm0
# asm 1: pxor  <xmm0=int6464#2,<xmm4=int6464#1
# asm 2: pxor  <xmm0=%xmm1,<xmm4=%xmm0
pxor  %xmm1,%xmm0

# qhasm:           xmm4 &= xmm6
# asm 1: pand  <xmm6=int6464#4,<xmm4=int6464#1
# asm 2: pand  <xmm6=%xmm3,<xmm4=%xmm0
pand  %xmm3,%xmm0

# qhasm:           xmm0 &= xmm7
# asm 1: pand  <xmm7=int6464#6,<xmm0=int6464#2
# asm 2: pand  <xmm7=%xmm5,<xmm0=%xmm1
pand  %xmm5,%xmm1

# qhasm:           xmm0 ^= xmm4
# asm 1: pxor  <xmm4=int6464#1,<xmm0=int6464#2
# asm 2: pxor  <xmm4=%xmm0,<xmm0=%xmm1
pxor  %xmm0,%xmm1

# qhasm:           xmm4 ^= xmm3
# asm 1: pxor  <xmm3=int6464#3,<xmm4=int6464#1
# asm 2: pxor  <xmm3=%xmm2,<xmm4=%xmm0
pxor  %xmm2,%xmm0

# qhasm:           xmm2 = xmm5
# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
movdqa %xmm7,%xmm2

# qhasm:           xmm2 ^= xmm1
# asm 1: pxor  <xmm1=int6464#5,<xmm2=int6464#3
# asm 2: pxor  <xmm1=%xmm4,<xmm2=%xmm2
pxor  %xmm4,%xmm2

# qhasm:           xmm2 &= xmm12
# asm 1: pand  <xmm12=int6464#13,<xmm2=int6464#3
# asm 2: pand  <xmm12=%xmm12,<xmm2=%xmm2
pand  %xmm12,%xmm2

# qhasm:           xmm12 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm12=int6464#13
# asm 2: pxor  <xmm10=%xmm10,<xmm12=%xmm12
pxor  %xmm10,%xmm12

# qhasm:           xmm12 &= xmm1
# asm 1: pand  <xmm1=int6464#5,<xmm12=int6464#13
# asm 2: pand  <xmm1=%xmm4,<xmm12=%xmm12
pand  %xmm4,%xmm12

# qhasm:           xmm10 &= xmm5
# asm 1: pand  <xmm5=int6464#8,<xmm10=int6464#11
# asm 2: pand  <xmm5=%xmm7,<xmm10=%xmm10
pand  %xmm7,%xmm10

# qhasm:           xmm12 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm12=int6464#13
# asm 2: pxor  <xmm10=%xmm10,<xmm12=%xmm12
pxor  %xmm10,%xmm12

# qhasm:           xmm10 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm10=int6464#11
# asm 2: pxor  <xmm2=%xmm2,<xmm10=%xmm10
pxor  %xmm2,%xmm10

# qhasm:         xmm7 ^= xmm5
# asm 1: pxor  <xmm5=int6464#8,<xmm7=int6464#6
# asm 2: pxor  <xmm5=%xmm7,<xmm7=%xmm5
pxor  %xmm7,%xmm5

# qhasm:         xmm6 ^= xmm1
# asm 1: pxor  <xmm1=int6464#5,<xmm6=int6464#4
# asm 2: pxor  <xmm1=%xmm4,<xmm6=%xmm3
pxor  %xmm4,%xmm3

# qhasm:           xmm3 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
movdqa %xmm5,%xmm2

# qhasm:           xmm3 ^= xmm6
# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
pxor  %xmm3,%xmm2

# qhasm:           xmm3 &= xmm15
# asm 1: pand  <xmm15=int6464#16,<xmm3=int6464#3
# asm 2: pand  <xmm15=%xmm15,<xmm3=%xmm2
pand  %xmm15,%xmm2

# qhasm:           xmm15 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm15=int6464#16
# asm 2: pxor  <xmm9=%xmm9,<xmm15=%xmm15
pxor  %xmm9,%xmm15

# qhasm:           xmm15 &= xmm6
# asm 1: pand  <xmm6=int6464#4,<xmm15=int6464#16
# asm 2: pand  <xmm6=%xmm3,<xmm15=%xmm15
pand  %xmm3,%xmm15

# qhasm:           xmm9 &= xmm7
# asm 1: pand  <xmm7=int6464#6,<xmm9=int6464#10
# asm 2: pand  <xmm7=%xmm5,<xmm9=%xmm9
pand  %xmm5,%xmm9

# qhasm:           xmm15 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm15=int6464#16
# asm 2: pxor  <xmm9=%xmm9,<xmm15=%xmm15
pxor  %xmm9,%xmm15

# qhasm:           xmm9 ^= xmm3
# asm 1: pxor  <xmm3=int6464#3,<xmm9=int6464#10
# asm 2: pxor  <xmm3=%xmm2,<xmm9=%xmm9
pxor  %xmm2,%xmm9

# qhasm:         xmm15 ^= xmm4
# asm 1: pxor  <xmm4=int6464#1,<xmm15=int6464#16
# asm 2: pxor  <xmm4=%xmm0,<xmm15=%xmm15
pxor  %xmm0,%xmm15

# qhasm:         xmm12 ^= xmm4
# asm 1: pxor  <xmm4=int6464#1,<xmm12=int6464#13
# asm 2: pxor  <xmm4=%xmm0,<xmm12=%xmm12
pxor  %xmm0,%xmm12

# qhasm:         xmm9 ^= xmm0
# asm 1: pxor  <xmm0=int6464#2,<xmm9=int6464#10
# asm 2: pxor  <xmm0=%xmm1,<xmm9=%xmm9
pxor  %xmm1,%xmm9

# qhasm:         xmm10 ^= xmm0
# asm 1: pxor  <xmm0=int6464#2,<xmm10=int6464#11
# asm 2: pxor  <xmm0=%xmm1,<xmm10=%xmm10
pxor  %xmm1,%xmm10

# qhasm:       xmm15 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm15=int6464#16
# asm 2: pxor  <xmm8=%xmm8,<xmm15=%xmm15
pxor  %xmm8,%xmm15

# qhasm:       xmm9 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm9=int6464#10
# asm 2: pxor  <xmm14=%xmm14,<xmm9=%xmm9
pxor  %xmm14,%xmm9

# qhasm:       xmm12 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm12=int6464#13
# asm 2: pxor  <xmm15=%xmm15,<xmm12=%xmm12
pxor  %xmm15,%xmm12

# qhasm:       xmm14 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm14=int6464#15
# asm 2: pxor  <xmm8=%xmm8,<xmm14=%xmm14
pxor  %xmm8,%xmm14

# qhasm:       xmm8 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm8=int6464#9
# asm 2: pxor  <xmm9=%xmm9,<xmm8=%xmm8
pxor  %xmm9,%xmm8

# qhasm:       xmm9 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm9=int6464#10
# asm 2: pxor  <xmm13=%xmm13,<xmm9=%xmm9
pxor  %xmm13,%xmm9

# qhasm:       xmm13 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm13=int6464#14
# asm 2: pxor  <xmm10=%xmm10,<xmm13=%xmm13
pxor  %xmm10,%xmm13

# qhasm:       xmm12 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm12=int6464#13
# asm 2: pxor  <xmm13=%xmm13,<xmm12=%xmm12
pxor  %xmm13,%xmm12

# qhasm:       xmm10 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm10=int6464#11
# asm 2: pxor  <xmm11=%xmm11,<xmm10=%xmm10
pxor  %xmm11,%xmm10

# qhasm:       xmm11 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm11=int6464#12
# asm 2: pxor  <xmm13=%xmm13,<xmm11=%xmm11
pxor  %xmm13,%xmm11

# qhasm:       xmm14 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm14=int6464#15
# asm 2: pxor  <xmm11=%xmm11,<xmm14=%xmm14
pxor  %xmm11,%xmm14

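# MixColumns for this round, same dword-rotation pattern as before: rotate
# each slice by one dword (pshufd $0x93) into a temporary, accumulate the
# XOR terms, then rotate by two dwords (pshufd $0x4E) and fold in.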
# qhasm:     xmm0 = shuffle dwords of xmm8 by 0x93
# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
pshufd $0x93,%xmm8,%xmm0

# qhasm:     xmm1 = shuffle dwords of xmm9 by 0x93
# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
pshufd $0x93,%xmm9,%xmm1

# qhasm:     xmm2 = shuffle dwords of xmm12 by 0x93
# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
pshufd $0x93,%xmm12,%xmm2

# qhasm:     xmm3 = shuffle dwords of xmm14 by 0x93
# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
pshufd $0x93,%xmm14,%xmm3

# qhasm:     xmm4 = shuffle dwords of xmm11 by 0x93
# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
pshufd $0x93,%xmm11,%xmm4

# qhasm:     xmm5 = shuffle dwords of xmm15 by 0x93
# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
pshufd $0x93,%xmm15,%xmm5

# qhasm:     xmm6 = shuffle dwords of xmm10 by 0x93
# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
pshufd $0x93,%xmm10,%xmm6

# qhasm:     xmm7 = shuffle dwords of xmm13 by 0x93
# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
pshufd $0x93,%xmm13,%xmm7

# qhasm:     xmm8 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm8=int6464#9
# asm 2: pxor  <xmm0=%xmm0,<xmm8=%xmm8
pxor  %xmm0,%xmm8

# qhasm:     xmm9 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm9=int6464#10
# asm 2: pxor  <xmm1=%xmm1,<xmm9=%xmm9
pxor  %xmm1,%xmm9

# qhasm:     xmm12 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm12=int6464#13
# asm 2: pxor  <xmm2=%xmm2,<xmm12=%xmm12
pxor  %xmm2,%xmm12

# qhasm:     xmm14 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm14=int6464#15
# asm 2: pxor  <xmm3=%xmm3,<xmm14=%xmm14
pxor  %xmm3,%xmm14

# qhasm:     xmm11 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#12
# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm11
pxor  %xmm4,%xmm11

# qhasm:     xmm15 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm15=int6464#16
# asm 2: pxor  <xmm5=%xmm5,<xmm15=%xmm15
pxor  %xmm5,%xmm15

# qhasm:     xmm10 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm10=int6464#11
# asm 2: pxor  <xmm6=%xmm6,<xmm10=%xmm10
pxor  %xmm6,%xmm10

# qhasm:     xmm13 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm13=int6464#14
# asm 2: pxor  <xmm7=%xmm7,<xmm13=%xmm13
pxor  %xmm7,%xmm13

# qhasm:     xmm0 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm0=int6464#1
# asm 2: pxor  <xmm13=%xmm13,<xmm0=%xmm0
pxor  %xmm13,%xmm0

# qhasm:     xmm1 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm1=int6464#2
# asm 2: pxor  <xmm8=%xmm8,<xmm1=%xmm1
pxor  %xmm8,%xmm1

# qhasm:     xmm2 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm2=int6464#3
# asm 2: pxor  <xmm9=%xmm9,<xmm2=%xmm2
pxor  %xmm9,%xmm2

# qhasm:     xmm1 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm1=int6464#2
# asm 2: pxor  <xmm13=%xmm13,<xmm1=%xmm1
pxor  %xmm13,%xmm1

# qhasm:     xmm3 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#4
# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm3
pxor  %xmm12,%xmm3

# qhasm:     xmm4 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm4=int6464#5
# asm 2: pxor  <xmm14=%xmm14,<xmm4=%xmm4
pxor  %xmm14,%xmm4

# qhasm:     xmm5 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm5=int6464#6
# asm 2: pxor  <xmm11=%xmm11,<xmm5=%xmm5
pxor  %xmm11,%xmm5

# qhasm:     xmm3 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm3=int6464#4
# asm 2: pxor  <xmm13=%xmm13,<xmm3=%xmm3
pxor  %xmm13,%xmm3

# qhasm:     xmm6 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm6=int6464#7
# asm 2: pxor  <xmm15=%xmm15,<xmm6=%xmm6
pxor  %xmm15,%xmm6

# qhasm:     xmm7 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm7=int6464#8
# asm 2: pxor  <xmm10=%xmm10,<xmm7=%xmm7
pxor  %xmm10,%xmm7

# qhasm:     xmm4 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm4=int6464#5
# asm 2: pxor  <xmm13=%xmm13,<xmm4=%xmm4
pxor  %xmm13,%xmm4

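# pshufd $0x4E swaps the two 64-bit halves of each slice (a rotation by two
# 32-bit words); XORed into the accumulators above, this seems to supply the
# remaining rotated terms of the MixColumns sum.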
# qhasm:     xmm8 = shuffle dwords of xmm8 by 0x4E
# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
pshufd $0x4E,%xmm8,%xmm8

# qhasm:     xmm9 = shuffle dwords of xmm9 by 0x4E
# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
pshufd $0x4E,%xmm9,%xmm9

# qhasm:     xmm12 = shuffle dwords of xmm12 by 0x4E
# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
pshufd $0x4E,%xmm12,%xmm12

# qhasm:     xmm14 = shuffle dwords of xmm14 by 0x4E
# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
pshufd $0x4E,%xmm14,%xmm14

# qhasm:     xmm11 = shuffle dwords of xmm11 by 0x4E
# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
pshufd $0x4E,%xmm11,%xmm11

# qhasm:     xmm15 = shuffle dwords of xmm15 by 0x4E
# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
pshufd $0x4E,%xmm15,%xmm15

# qhasm:     xmm10 = shuffle dwords of xmm10 by 0x4E
# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
pshufd $0x4E,%xmm10,%xmm10

# qhasm:     xmm13 = shuffle dwords of xmm13 by 0x4E
# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
pshufd $0x4E,%xmm13,%xmm13

# qhasm:     xmm0 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm8=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

# qhasm:     xmm1 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm1=int6464#2
# asm 2: pxor  <xmm9=%xmm9,<xmm1=%xmm1
pxor  %xmm9,%xmm1

# qhasm:     xmm2 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm2=int6464#3
# asm 2: pxor  <xmm12=%xmm12,<xmm2=%xmm2
pxor  %xmm12,%xmm2

# qhasm:     xmm3 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm3=int6464#4
# asm 2: pxor  <xmm14=%xmm14,<xmm3=%xmm3
pxor  %xmm14,%xmm3

# qhasm:     xmm4 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm4=int6464#5
# asm 2: pxor  <xmm11=%xmm11,<xmm4=%xmm4
pxor  %xmm11,%xmm4

# qhasm:     xmm5 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm5=int6464#6
# asm 2: pxor  <xmm15=%xmm15,<xmm5=%xmm5
pxor  %xmm15,%xmm5

# qhasm:     xmm6 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm6=int6464#7
# asm 2: pxor  <xmm10=%xmm10,<xmm6=%xmm6
pxor  %xmm10,%xmm6

# qhasm:     xmm7 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm7=int6464#8
# asm 2: pxor  <xmm13=%xmm13,<xmm7=%xmm7
pxor  %xmm13,%xmm7

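# AddRoundKey: the bitsliced round keys appear to occupy 128 bytes per round
# (eight 16-byte slices), so the offsets 768..880 below would select one
# round's key material. Each key XOR is followed by pshufb with the SR
# constant, a byte permutation within the slice implementing ShiftRows.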
# qhasm:     xmm0 ^= *(int128 *)(c + 768)
# asm 1: pxor 768(<c=int64#4),<xmm0=int6464#1
# asm 2: pxor 768(<c=%rcx),<xmm0=%xmm0
pxor 768(%rcx),%xmm0

# qhasm:     shuffle bytes of xmm0 by SR
# asm 1: pshufb SR,<xmm0=int6464#1
# asm 2: pshufb SR,<xmm0=%xmm0
pshufb SR,%xmm0

# qhasm:     xmm1 ^= *(int128 *)(c + 784)
# asm 1: pxor 784(<c=int64#4),<xmm1=int6464#2
# asm 2: pxor 784(<c=%rcx),<xmm1=%xmm1
pxor 784(%rcx),%xmm1

# qhasm:     shuffle bytes of xmm1 by SR
# asm 1: pshufb SR,<xmm1=int6464#2
# asm 2: pshufb SR,<xmm1=%xmm1
pshufb SR,%xmm1

# qhasm:     xmm2 ^= *(int128 *)(c + 800)
# asm 1: pxor 800(<c=int64#4),<xmm2=int6464#3
# asm 2: pxor 800(<c=%rcx),<xmm2=%xmm2
pxor 800(%rcx),%xmm2

# qhasm:     shuffle bytes of xmm2 by SR
# asm 1: pshufb SR,<xmm2=int6464#3
# asm 2: pshufb SR,<xmm2=%xmm2
pshufb SR,%xmm2

# qhasm:     xmm3 ^= *(int128 *)(c + 816)
# asm 1: pxor 816(<c=int64#4),<xmm3=int6464#4
# asm 2: pxor 816(<c=%rcx),<xmm3=%xmm3
pxor 816(%rcx),%xmm3

# qhasm:     shuffle bytes of xmm3 by SR
# asm 1: pshufb SR,<xmm3=int6464#4
# asm 2: pshufb SR,<xmm3=%xmm3
pshufb SR,%xmm3

# qhasm:     xmm4 ^= *(int128 *)(c + 832)
# asm 1: pxor 832(<c=int64#4),<xmm4=int6464#5
# asm 2: pxor 832(<c=%rcx),<xmm4=%xmm4
pxor 832(%rcx),%xmm4

# qhasm:     shuffle bytes of xmm4 by SR
# asm 1: pshufb SR,<xmm4=int6464#5
# asm 2: pshufb SR,<xmm4=%xmm4
pshufb SR,%xmm4

# qhasm:     xmm5 ^= *(int128 *)(c + 848)
# asm 1: pxor 848(<c=int64#4),<xmm5=int6464#6
# asm 2: pxor 848(<c=%rcx),<xmm5=%xmm5
pxor 848(%rcx),%xmm5

# qhasm:     shuffle bytes of xmm5 by SR
# asm 1: pshufb SR,<xmm5=int6464#6
# asm 2: pshufb SR,<xmm5=%xmm5
pshufb SR,%xmm5

# qhasm:     xmm6 ^= *(int128 *)(c + 864)
# asm 1: pxor 864(<c=int64#4),<xmm6=int6464#7
# asm 2: pxor 864(<c=%rcx),<xmm6=%xmm6
pxor 864(%rcx),%xmm6

# qhasm:     shuffle bytes of xmm6 by SR
# asm 1: pshufb SR,<xmm6=int6464#7
# asm 2: pshufb SR,<xmm6=%xmm6
pshufb SR,%xmm6

# qhasm:     xmm7 ^= *(int128 *)(c + 880)
# asm 1: pxor 880(<c=int64#4),<xmm7=int6464#8
# asm 2: pxor 880(<c=%rcx),<xmm7=%xmm7
pxor 880(%rcx),%xmm7

# qhasm:     shuffle bytes of xmm7 by SR
# asm 1: pshufb SR,<xmm7=int6464#8
# asm 2: pshufb SR,<xmm7=%xmm7
pshufb SR,%xmm7

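# SubBytes: the long pxor/pand/por sequence that follows evaluates the AES
# S-box as a boolean circuit over the eight bit slices. This first run of
# XORs looks like the circuit's top linear layer.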
# qhasm:       xmm5 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm5=int6464#6
# asm 2: pxor  <xmm6=%xmm6,<xmm5=%xmm5
pxor  %xmm6,%xmm5

# qhasm:       xmm2 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm2=int6464#3
# asm 2: pxor  <xmm1=%xmm1,<xmm2=%xmm2
pxor  %xmm1,%xmm2

# qhasm:       xmm5 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm5=int6464#6
# asm 2: pxor  <xmm0=%xmm0,<xmm5=%xmm5
pxor  %xmm0,%xmm5

# qhasm:       xmm6 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm6=int6464#7
# asm 2: pxor  <xmm2=%xmm2,<xmm6=%xmm6
pxor  %xmm2,%xmm6

# qhasm:       xmm3 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm3=int6464#4
# asm 2: pxor  <xmm0=%xmm0,<xmm3=%xmm3
pxor  %xmm0,%xmm3

# qhasm:       xmm6 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm6=int6464#7
# asm 2: pxor  <xmm3=%xmm3,<xmm6=%xmm6
pxor  %xmm3,%xmm6

# qhasm:       xmm3 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm3=int6464#4
# asm 2: pxor  <xmm7=%xmm7,<xmm3=%xmm3
pxor  %xmm7,%xmm3

# qhasm:       xmm3 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm3=int6464#4
# asm 2: pxor  <xmm4=%xmm4,<xmm3=%xmm3
pxor  %xmm4,%xmm3

# qhasm:       xmm7 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm7=int6464#8
# asm 2: pxor  <xmm5=%xmm5,<xmm7=%xmm7
pxor  %xmm5,%xmm7

# qhasm:       xmm3 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm3=int6464#4
# asm 2: pxor  <xmm1=%xmm1,<xmm3=%xmm3
pxor  %xmm1,%xmm3

# qhasm:       xmm4 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm4=int6464#5
# asm 2: pxor  <xmm5=%xmm5,<xmm4=%xmm4
pxor  %xmm5,%xmm4

# qhasm:       xmm2 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm2=int6464#3
# asm 2: pxor  <xmm7=%xmm7,<xmm2=%xmm2
pxor  %xmm7,%xmm2

# qhasm:       xmm1 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm1=int6464#2
# asm 2: pxor  <xmm5=%xmm5,<xmm1=%xmm1
pxor  %xmm5,%xmm1

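# Nonlinear middle section of the S-box circuit (effectively the GF(2^8)
# inversion built from AND/OR/XOR gates on shared subexpressions). From here
# on the qhasm variable names and the physical xmm registers diverge; the
# "asm 2" comments track the actual register allocation.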
# qhasm:       xmm11 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
movdqa %xmm7,%xmm8

# qhasm:       xmm10 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
movdqa %xmm1,%xmm9

# qhasm:       xmm9 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
movdqa %xmm5,%xmm10

# qhasm:       xmm13 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
movdqa %xmm2,%xmm11

# qhasm:       xmm12 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
movdqa %xmm6,%xmm12

# qhasm:       xmm11 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#9
# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm8
pxor  %xmm4,%xmm8

# qhasm:       xmm10 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm10=int6464#10
# asm 2: pxor  <xmm2=%xmm2,<xmm10=%xmm9
pxor  %xmm2,%xmm9

# qhasm:       xmm9 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm9=int6464#11
# asm 2: pxor  <xmm3=%xmm3,<xmm9=%xmm10
pxor  %xmm3,%xmm10

# qhasm:       xmm13 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm13=int6464#12
# asm 2: pxor  <xmm4=%xmm4,<xmm13=%xmm11
pxor  %xmm4,%xmm11

# qhasm:       xmm12 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#13
# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm12
pxor  %xmm0,%xmm12

# qhasm:       xmm14 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
movdqa %xmm8,%xmm13

# qhasm:       xmm8 = xmm10
# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
movdqa %xmm9,%xmm14

# qhasm:       xmm15 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
movdqa %xmm8,%xmm15

# qhasm:       xmm10 |= xmm9
# asm 1: por   <xmm9=int6464#11,<xmm10=int6464#10
# asm 2: por   <xmm9=%xmm10,<xmm10=%xmm9
por   %xmm10,%xmm9

# qhasm:       xmm11 |= xmm12
# asm 1: por   <xmm12=int6464#13,<xmm11=int6464#9
# asm 2: por   <xmm12=%xmm12,<xmm11=%xmm8
por   %xmm12,%xmm8

# qhasm:       xmm15 ^= xmm8
# asm 1: pxor  <xmm8=int6464#15,<xmm15=int6464#16
# asm 2: pxor  <xmm8=%xmm14,<xmm15=%xmm15
pxor  %xmm14,%xmm15

# qhasm:       xmm14 &= xmm12
# asm 1: pand  <xmm12=int6464#13,<xmm14=int6464#14
# asm 2: pand  <xmm12=%xmm12,<xmm14=%xmm13
pand  %xmm12,%xmm13

# qhasm:       xmm8 &= xmm9
# asm 1: pand  <xmm9=int6464#11,<xmm8=int6464#15
# asm 2: pand  <xmm9=%xmm10,<xmm8=%xmm14
pand  %xmm10,%xmm14

# qhasm:       xmm12 ^= xmm9
# asm 1: pxor  <xmm9=int6464#11,<xmm12=int6464#13
# asm 2: pxor  <xmm9=%xmm10,<xmm12=%xmm12
pxor  %xmm10,%xmm12

# qhasm:       xmm15 &= xmm12
# asm 1: pand  <xmm12=int6464#13,<xmm15=int6464#16
# asm 2: pand  <xmm12=%xmm12,<xmm15=%xmm15
pand  %xmm12,%xmm15

# qhasm:       xmm12 = xmm3
# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
movdqa %xmm3,%xmm10

# qhasm:       xmm12 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#11
# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm10
pxor  %xmm0,%xmm10

# qhasm:       xmm13 &= xmm12
# asm 1: pand  <xmm12=int6464#11,<xmm13=int6464#12
# asm 2: pand  <xmm12=%xmm10,<xmm13=%xmm11
pand  %xmm10,%xmm11

# qhasm:       xmm11 ^= xmm13
# asm 1: pxor  <xmm13=int6464#12,<xmm11=int6464#9
# asm 2: pxor  <xmm13=%xmm11,<xmm11=%xmm8
pxor  %xmm11,%xmm8

# qhasm:       xmm10 ^= xmm13
# asm 1: pxor  <xmm13=int6464#12,<xmm10=int6464#10
# asm 2: pxor  <xmm13=%xmm11,<xmm10=%xmm9
pxor  %xmm11,%xmm9

# qhasm:       xmm13 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
movdqa %xmm7,%xmm10

# qhasm:       xmm13 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm13=int6464#11
# asm 2: pxor  <xmm1=%xmm1,<xmm13=%xmm10
pxor  %xmm1,%xmm10

# qhasm:       xmm12 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
movdqa %xmm5,%xmm11

# qhasm:       xmm9 = xmm13
# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
movdqa %xmm10,%xmm12

# qhasm:       xmm12 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm12=int6464#12
# asm 2: pxor  <xmm6=%xmm6,<xmm12=%xmm11
pxor  %xmm6,%xmm11

# qhasm:       xmm9 |= xmm12
# asm 1: por   <xmm12=int6464#12,<xmm9=int6464#13
# asm 2: por   <xmm12=%xmm11,<xmm9=%xmm12
por   %xmm11,%xmm12

# qhasm:       xmm13 &= xmm12
# asm 1: pand  <xmm12=int6464#12,<xmm13=int6464#11
# asm 2: pand  <xmm12=%xmm11,<xmm13=%xmm10
pand  %xmm11,%xmm10

# qhasm:       xmm8 ^= xmm13
# asm 1: pxor  <xmm13=int6464#11,<xmm8=int6464#15
# asm 2: pxor  <xmm13=%xmm10,<xmm8=%xmm14
pxor  %xmm10,%xmm14

# qhasm:       xmm11 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm11=int6464#9
# asm 2: pxor  <xmm15=%xmm15,<xmm11=%xmm8
pxor  %xmm15,%xmm8

# qhasm:       xmm10 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm10=int6464#10
# asm 2: pxor  <xmm14=%xmm13,<xmm10=%xmm9
pxor  %xmm13,%xmm9

# qhasm:       xmm9 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm9=int6464#13
# asm 2: pxor  <xmm15=%xmm15,<xmm9=%xmm12
pxor  %xmm15,%xmm12

# qhasm:       xmm8 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm8=int6464#15
# asm 2: pxor  <xmm14=%xmm13,<xmm8=%xmm14
pxor  %xmm13,%xmm14

# qhasm:       xmm9 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm9=int6464#13
# asm 2: pxor  <xmm14=%xmm13,<xmm9=%xmm12
pxor  %xmm13,%xmm12

# qhasm:       xmm12 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
movdqa %xmm2,%xmm10

# qhasm:       xmm13 = xmm4
# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
movdqa %xmm4,%xmm11

# qhasm:       xmm14 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
movdqa %xmm1,%xmm13

# qhasm:       xmm15 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
movdqa %xmm7,%xmm15

# qhasm:       xmm12 &= xmm3
# asm 1: pand  <xmm3=int6464#4,<xmm12=int6464#11
# asm 2: pand  <xmm3=%xmm3,<xmm12=%xmm10
pand  %xmm3,%xmm10

# qhasm:       xmm13 &= xmm0
# asm 1: pand  <xmm0=int6464#1,<xmm13=int6464#12
# asm 2: pand  <xmm0=%xmm0,<xmm13=%xmm11
pand  %xmm0,%xmm11

# qhasm:       xmm14 &= xmm5
# asm 1: pand  <xmm5=int6464#6,<xmm14=int6464#14
# asm 2: pand  <xmm5=%xmm5,<xmm14=%xmm13
pand  %xmm5,%xmm13

# qhasm:       xmm15 |= xmm6
# asm 1: por   <xmm6=int6464#7,<xmm15=int6464#16
# asm 2: por   <xmm6=%xmm6,<xmm15=%xmm15
por   %xmm6,%xmm15

# qhasm:       xmm11 ^= xmm12
# asm 1: pxor  <xmm12=int6464#11,<xmm11=int6464#9
# asm 2: pxor  <xmm12=%xmm10,<xmm11=%xmm8
pxor  %xmm10,%xmm8

# qhasm:       xmm10 ^= xmm13
# asm 1: pxor  <xmm13=int6464#12,<xmm10=int6464#10
# asm 2: pxor  <xmm13=%xmm11,<xmm10=%xmm9
pxor  %xmm11,%xmm9

# qhasm:       xmm9 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm9=int6464#13
# asm 2: pxor  <xmm14=%xmm13,<xmm9=%xmm12
pxor  %xmm13,%xmm12

# qhasm:       xmm8 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm8=int6464#15
# asm 2: pxor  <xmm15=%xmm15,<xmm8=%xmm14
pxor  %xmm15,%xmm14

# qhasm:       xmm12 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
movdqa %xmm8,%xmm10

# qhasm:       xmm12 ^= xmm10
# asm 1: pxor  <xmm10=int6464#10,<xmm12=int6464#11
# asm 2: pxor  <xmm10=%xmm9,<xmm12=%xmm10
pxor  %xmm9,%xmm10

# qhasm:       xmm11 &= xmm9
# asm 1: pand  <xmm9=int6464#13,<xmm11=int6464#9
# asm 2: pand  <xmm9=%xmm12,<xmm11=%xmm8
pand  %xmm12,%xmm8

# qhasm:       xmm14 = xmm8
# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
movdqa %xmm14,%xmm11

# qhasm:       xmm14 ^= xmm11
# asm 1: pxor  <xmm11=int6464#9,<xmm14=int6464#12
# asm 2: pxor  <xmm11=%xmm8,<xmm14=%xmm11
pxor  %xmm8,%xmm11

# qhasm:       xmm15 = xmm12
# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
movdqa %xmm10,%xmm13

# qhasm:       xmm15 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm15=int6464#14
# asm 2: pand  <xmm14=%xmm11,<xmm15=%xmm13
pand  %xmm11,%xmm13

# qhasm:       xmm15 ^= xmm10
# asm 1: pxor  <xmm10=int6464#10,<xmm15=int6464#14
# asm 2: pxor  <xmm10=%xmm9,<xmm15=%xmm13
pxor  %xmm9,%xmm13

# qhasm:       xmm13 = xmm9
# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
movdqa %xmm12,%xmm15

# qhasm:       xmm13 ^= xmm8
# asm 1: pxor  <xmm8=int6464#15,<xmm13=int6464#16
# asm 2: pxor  <xmm8=%xmm14,<xmm13=%xmm15
pxor  %xmm14,%xmm15

# qhasm:       xmm11 ^= xmm10
# asm 1: pxor  <xmm10=int6464#10,<xmm11=int6464#9
# asm 2: pxor  <xmm10=%xmm9,<xmm11=%xmm8
pxor  %xmm9,%xmm8

# qhasm:       xmm13 &= xmm11
# asm 1: pand  <xmm11=int6464#9,<xmm13=int6464#16
# asm 2: pand  <xmm11=%xmm8,<xmm13=%xmm15
pand  %xmm8,%xmm15

# qhasm:       xmm13 ^= xmm8
# asm 1: pxor  <xmm8=int6464#15,<xmm13=int6464#16
# asm 2: pxor  <xmm8=%xmm14,<xmm13=%xmm15
pxor  %xmm14,%xmm15

# qhasm:       xmm9 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm9=int6464#13
# asm 2: pxor  <xmm13=%xmm15,<xmm9=%xmm12
pxor  %xmm15,%xmm12

# qhasm:       xmm10 = xmm14
# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
movdqa %xmm11,%xmm8

# qhasm:       xmm10 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm10=int6464#9
# asm 2: pxor  <xmm13=%xmm15,<xmm10=%xmm8
pxor  %xmm15,%xmm8

# qhasm:       xmm10 &= xmm8
# asm 1: pand  <xmm8=int6464#15,<xmm10=int6464#9
# asm 2: pand  <xmm8=%xmm14,<xmm10=%xmm8
pand  %xmm14,%xmm8

# qhasm:       xmm9 ^= xmm10
# asm 1: pxor  <xmm10=int6464#9,<xmm9=int6464#13
# asm 2: pxor  <xmm10=%xmm8,<xmm9=%xmm12
pxor  %xmm8,%xmm12

# qhasm:       xmm14 ^= xmm10
# asm 1: pxor  <xmm10=int6464#9,<xmm14=int6464#12
# asm 2: pxor  <xmm10=%xmm8,<xmm14=%xmm11
pxor  %xmm8,%xmm11

# qhasm:       xmm14 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm14=int6464#12
# asm 2: pand  <xmm15=%xmm13,<xmm14=%xmm11
pand  %xmm13,%xmm11

# qhasm:       xmm14 ^= xmm12
# asm 1: pxor  <xmm12=int6464#11,<xmm14=int6464#12
# asm 2: pxor  <xmm12=%xmm10,<xmm14=%xmm11
pxor  %xmm10,%xmm11

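# Bottom half of the S-box circuit: the shared subproducts computed above
# are multiplied back against combinations of the input bits and XORed
# together to form the eight output slices.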
# qhasm:         xmm12 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
movdqa %xmm6,%xmm8

# qhasm:         xmm8 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
movdqa %xmm5,%xmm9

# qhasm:           xmm10 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm10 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm10=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm10=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm10 &= xmm6
# asm 1: pand  <xmm6=int6464#7,<xmm10=int6464#11
# asm 2: pand  <xmm6=%xmm6,<xmm10=%xmm10
pand  %xmm6,%xmm10

# qhasm:           xmm6 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm6=int6464#7
# asm 2: pxor  <xmm5=%xmm5,<xmm6=%xmm6
pxor  %xmm5,%xmm6

# qhasm:           xmm6 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm6=int6464#7
# asm 2: pand  <xmm14=%xmm11,<xmm6=%xmm6
pand  %xmm11,%xmm6

# qhasm:           xmm5 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm5=int6464#6
# asm 2: pand  <xmm15=%xmm13,<xmm5=%xmm5
pand  %xmm13,%xmm5

# qhasm:           xmm6 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm6=int6464#7
# asm 2: pxor  <xmm5=%xmm5,<xmm6=%xmm6
pxor  %xmm5,%xmm6

# qhasm:           xmm5 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm5=int6464#6
# asm 2: pxor  <xmm10=%xmm10,<xmm5=%xmm5
pxor  %xmm10,%xmm5

# qhasm:         xmm12 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#9
# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm8
pxor  %xmm0,%xmm8

# qhasm:         xmm8 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm8=int6464#10
# asm 2: pxor  <xmm3=%xmm3,<xmm8=%xmm9
pxor  %xmm3,%xmm9

# qhasm:         xmm15 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm15=int6464#14
# asm 2: pxor  <xmm13=%xmm15,<xmm15=%xmm13
pxor  %xmm15,%xmm13

# qhasm:         xmm14 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm14=int6464#12
# asm 2: pxor  <xmm9=%xmm12,<xmm14=%xmm11
pxor  %xmm12,%xmm11

# qhasm:           xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm11 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm11 &= xmm12
# asm 1: pand  <xmm12=int6464#9,<xmm11=int6464#11
# asm 2: pand  <xmm12=%xmm8,<xmm11=%xmm10
pand  %xmm8,%xmm10

# qhasm:           xmm12 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm12=int6464#9
# asm 2: pxor  <xmm8=%xmm9,<xmm12=%xmm8
pxor  %xmm9,%xmm8

# qhasm:           xmm12 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm12=int6464#9
# asm 2: pand  <xmm14=%xmm11,<xmm12=%xmm8
pand  %xmm11,%xmm8

# qhasm:           xmm8 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm8=int6464#10
# asm 2: pand  <xmm15=%xmm13,<xmm8=%xmm9
pand  %xmm13,%xmm9

# qhasm:           xmm8 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm8=int6464#10
# asm 2: pxor  <xmm12=%xmm8,<xmm8=%xmm9
pxor  %xmm8,%xmm9

# qhasm:           xmm12 ^= xmm11
# asm 1: pxor  <xmm11=int6464#11,<xmm12=int6464#9
# asm 2: pxor  <xmm11=%xmm10,<xmm12=%xmm8
pxor  %xmm10,%xmm8

# qhasm:           xmm10 = xmm13
# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
movdqa %xmm15,%xmm10

# qhasm:           xmm10 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm10=int6464#11
# asm 2: pxor  <xmm9=%xmm12,<xmm10=%xmm10
pxor  %xmm12,%xmm10

# qhasm:           xmm10 &= xmm0
# asm 1: pand  <xmm0=int6464#1,<xmm10=int6464#11
# asm 2: pand  <xmm0=%xmm0,<xmm10=%xmm10
pand  %xmm0,%xmm10

# qhasm:           xmm0 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm0=int6464#1
# asm 2: pxor  <xmm3=%xmm3,<xmm0=%xmm0
pxor  %xmm3,%xmm0

# qhasm:           xmm0 &= xmm9
# asm 1: pand  <xmm9=int6464#13,<xmm0=int6464#1
# asm 2: pand  <xmm9=%xmm12,<xmm0=%xmm0
pand  %xmm12,%xmm0

# qhasm:           xmm3 &= xmm13
# asm 1: pand  <xmm13=int6464#16,<xmm3=int6464#4
# asm 2: pand  <xmm13=%xmm15,<xmm3=%xmm3
pand  %xmm15,%xmm3

# qhasm:           xmm0 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm0=int6464#1
# asm 2: pxor  <xmm3=%xmm3,<xmm0=%xmm0
pxor  %xmm3,%xmm0

# qhasm:           xmm3 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm3=int6464#4
# asm 2: pxor  <xmm10=%xmm10,<xmm3=%xmm3
pxor  %xmm10,%xmm3

# qhasm:         xmm6 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm6=int6464#7
# asm 2: pxor  <xmm12=%xmm8,<xmm6=%xmm6
pxor  %xmm8,%xmm6

# qhasm:         xmm0 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm12=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

# qhasm:         xmm5 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm5=int6464#6
# asm 2: pxor  <xmm8=%xmm9,<xmm5=%xmm5
pxor  %xmm9,%xmm5

# qhasm:         xmm3 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm3=int6464#4
# asm 2: pxor  <xmm8=%xmm9,<xmm3=%xmm3
pxor  %xmm9,%xmm3

# qhasm:         xmm12 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
movdqa %xmm7,%xmm8

# qhasm:         xmm8 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
movdqa %xmm1,%xmm9

# qhasm:         xmm12 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm12=int6464#9
# asm 2: pxor  <xmm4=%xmm4,<xmm12=%xmm8
pxor  %xmm4,%xmm8

# qhasm:         xmm8 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm8=int6464#10
# asm 2: pxor  <xmm2=%xmm2,<xmm8=%xmm9
pxor  %xmm2,%xmm9

# qhasm:           xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm11 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm11 &= xmm12
# asm 1: pand  <xmm12=int6464#9,<xmm11=int6464#11
# asm 2: pand  <xmm12=%xmm8,<xmm11=%xmm10
pand  %xmm8,%xmm10

# qhasm:           xmm12 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm12=int6464#9
# asm 2: pxor  <xmm8=%xmm9,<xmm12=%xmm8
pxor  %xmm9,%xmm8

# qhasm:           xmm12 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm12=int6464#9
# asm 2: pand  <xmm14=%xmm11,<xmm12=%xmm8
pand  %xmm11,%xmm8

# qhasm:           xmm8 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm8=int6464#10
# asm 2: pand  <xmm15=%xmm13,<xmm8=%xmm9
pand  %xmm13,%xmm9

# qhasm:           xmm8 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm8=int6464#10
# asm 2: pxor  <xmm12=%xmm8,<xmm8=%xmm9
pxor  %xmm8,%xmm9

# qhasm:           xmm12 ^= xmm11
# asm 1: pxor  <xmm11=int6464#11,<xmm12=int6464#9
# asm 2: pxor  <xmm11=%xmm10,<xmm12=%xmm8
pxor  %xmm10,%xmm8

# qhasm:           xmm10 = xmm13
# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
movdqa %xmm15,%xmm10

# qhasm:           xmm10 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm10=int6464#11
# asm 2: pxor  <xmm9=%xmm12,<xmm10=%xmm10
pxor  %xmm12,%xmm10

# qhasm:           xmm10 &= xmm4
# asm 1: pand  <xmm4=int6464#5,<xmm10=int6464#11
# asm 2: pand  <xmm4=%xmm4,<xmm10=%xmm10
pand  %xmm4,%xmm10

# qhasm:           xmm4 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm4=int6464#5
# asm 2: pxor  <xmm2=%xmm2,<xmm4=%xmm4
pxor  %xmm2,%xmm4

# qhasm:           xmm4 &= xmm9
# asm 1: pand  <xmm9=int6464#13,<xmm4=int6464#5
# asm 2: pand  <xmm9=%xmm12,<xmm4=%xmm4
pand  %xmm12,%xmm4

# qhasm:           xmm2 &= xmm13
# asm 1: pand  <xmm13=int6464#16,<xmm2=int6464#3
# asm 2: pand  <xmm13=%xmm15,<xmm2=%xmm2
pand  %xmm15,%xmm2

# qhasm:           xmm4 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm4=int6464#5
# asm 2: pxor  <xmm2=%xmm2,<xmm4=%xmm4
pxor  %xmm2,%xmm4

# qhasm:           xmm2 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm2=int6464#3
# asm 2: pxor  <xmm10=%xmm10,<xmm2=%xmm2
pxor  %xmm10,%xmm2

# qhasm:         xmm15 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm15=int6464#14
# asm 2: pxor  <xmm13=%xmm15,<xmm15=%xmm13
pxor  %xmm15,%xmm13

# qhasm:         xmm14 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm14=int6464#12
# asm 2: pxor  <xmm9=%xmm12,<xmm14=%xmm11
pxor  %xmm12,%xmm11

# qhasm:           xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm11 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm11 &= xmm7
# asm 1: pand  <xmm7=int6464#8,<xmm11=int6464#11
# asm 2: pand  <xmm7=%xmm7,<xmm11=%xmm10
pand  %xmm7,%xmm10

# qhasm:           xmm7 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm7=int6464#8
# asm 2: pxor  <xmm1=%xmm1,<xmm7=%xmm7
pxor  %xmm1,%xmm7

# qhasm:           xmm7 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm7=int6464#8
# asm 2: pand  <xmm14=%xmm11,<xmm7=%xmm7
pand  %xmm11,%xmm7

# qhasm:           xmm1 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm1=int6464#2
# asm 2: pand  <xmm15=%xmm13,<xmm1=%xmm1
pand  %xmm13,%xmm1

# qhasm:           xmm7 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm7=int6464#8
# asm 2: pxor  <xmm1=%xmm1,<xmm7=%xmm7
pxor  %xmm1,%xmm7

# qhasm:           xmm1 ^= xmm11
# asm 1: pxor  <xmm11=int6464#11,<xmm1=int6464#2
# asm 2: pxor  <xmm11=%xmm10,<xmm1=%xmm1
pxor  %xmm10,%xmm1

# qhasm:         xmm7 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm7=int6464#8
# asm 2: pxor  <xmm12=%xmm8,<xmm7=%xmm7
pxor  %xmm8,%xmm7

# qhasm:         xmm4 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm4=int6464#5
# asm 2: pxor  <xmm12=%xmm8,<xmm4=%xmm4
pxor  %xmm8,%xmm4

# qhasm:         xmm1 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm1=int6464#2
# asm 2: pxor  <xmm8=%xmm9,<xmm1=%xmm1
pxor  %xmm9,%xmm1

# qhasm:         xmm2 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm2=int6464#3
# asm 2: pxor  <xmm8=%xmm9,<xmm2=%xmm2
pxor  %xmm9,%xmm2

# qhasm:       xmm7 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm7=int6464#8
# asm 2: pxor  <xmm0=%xmm0,<xmm7=%xmm7
pxor  %xmm0,%xmm7

# qhasm:       xmm1 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm1=int6464#2
# asm 2: pxor  <xmm6=%xmm6,<xmm1=%xmm1
pxor  %xmm6,%xmm1

# qhasm:       xmm4 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm4=int6464#5
# asm 2: pxor  <xmm7=%xmm7,<xmm4=%xmm4
pxor  %xmm7,%xmm4

# qhasm:       xmm6 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm6=int6464#7
# asm 2: pxor  <xmm0=%xmm0,<xmm6=%xmm6
pxor  %xmm0,%xmm6

# qhasm:       xmm0 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm0=int6464#1
# asm 2: pxor  <xmm1=%xmm1,<xmm0=%xmm0
pxor  %xmm1,%xmm0

# qhasm:       xmm1 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm1=int6464#2
# asm 2: pxor  <xmm5=%xmm5,<xmm1=%xmm1
pxor  %xmm5,%xmm1

# qhasm:       xmm5 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm5=int6464#6
# asm 2: pxor  <xmm2=%xmm2,<xmm5=%xmm5
pxor  %xmm2,%xmm5

# qhasm:       xmm4 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm4=int6464#5
# asm 2: pxor  <xmm5=%xmm5,<xmm4=%xmm4
pxor  %xmm5,%xmm4

# qhasm:       xmm2 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm2=int6464#3
# asm 2: pxor  <xmm3=%xmm3,<xmm2=%xmm2
pxor  %xmm3,%xmm2

# qhasm:       xmm3 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm3=int6464#4
# asm 2: pxor  <xmm5=%xmm5,<xmm3=%xmm3
pxor  %xmm5,%xmm3

# qhasm:       xmm6 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm6=int6464#7
# asm 2: pxor  <xmm3=%xmm3,<xmm6=%xmm6
pxor  %xmm3,%xmm6

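# MixColumns again for this round: the rotate-by-one-dword (0x93) terms
# first, then the rotate-by-two-dword (0x4E) terms, combined with XORs as
# in the block above.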
# qhasm:     xmm8 = shuffle dwords of xmm0 by 0x93
# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
pshufd $0x93,%xmm0,%xmm8

# qhasm:     xmm9 = shuffle dwords of xmm1 by 0x93
# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
pshufd $0x93,%xmm1,%xmm9

# qhasm:     xmm10 = shuffle dwords of xmm4 by 0x93
# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
pshufd $0x93,%xmm4,%xmm10

# qhasm:     xmm11 = shuffle dwords of xmm6 by 0x93
# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
pshufd $0x93,%xmm6,%xmm11

# qhasm:     xmm12 = shuffle dwords of xmm3 by 0x93
# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
pshufd $0x93,%xmm3,%xmm12

# qhasm:     xmm13 = shuffle dwords of xmm7 by 0x93
# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
pshufd $0x93,%xmm7,%xmm13

# qhasm:     xmm14 = shuffle dwords of xmm2 by 0x93
# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
pshufd $0x93,%xmm2,%xmm14

# qhasm:     xmm15 = shuffle dwords of xmm5 by 0x93
# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
pshufd $0x93,%xmm5,%xmm15

# qhasm:     xmm0 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm8=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

# qhasm:     xmm1 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm1=int6464#2
# asm 2: pxor  <xmm9=%xmm9,<xmm1=%xmm1
pxor  %xmm9,%xmm1

# qhasm:     xmm4 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm4=int6464#5
# asm 2: pxor  <xmm10=%xmm10,<xmm4=%xmm4
pxor  %xmm10,%xmm4

# qhasm:     xmm6 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm6=int6464#7
# asm 2: pxor  <xmm11=%xmm11,<xmm6=%xmm6
pxor  %xmm11,%xmm6

# qhasm:     xmm3 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#4
# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm3
pxor  %xmm12,%xmm3

# qhasm:     xmm7 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm7=int6464#8
# asm 2: pxor  <xmm13=%xmm13,<xmm7=%xmm7
pxor  %xmm13,%xmm7

# qhasm:     xmm2 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm2=int6464#3
# asm 2: pxor  <xmm14=%xmm14,<xmm2=%xmm2
pxor  %xmm14,%xmm2

# qhasm:     xmm5 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm5=int6464#6
# asm 2: pxor  <xmm15=%xmm15,<xmm5=%xmm5
pxor  %xmm15,%xmm5

# qhasm:     xmm8 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm8=int6464#9
# asm 2: pxor  <xmm5=%xmm5,<xmm8=%xmm8
pxor  %xmm5,%xmm8

# qhasm:     xmm9 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm9=int6464#10
# asm 2: pxor  <xmm0=%xmm0,<xmm9=%xmm9
pxor  %xmm0,%xmm9

# qhasm:     xmm10 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm10=int6464#11
# asm 2: pxor  <xmm1=%xmm1,<xmm10=%xmm10
pxor  %xmm1,%xmm10

# qhasm:     xmm9 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm9=int6464#10
# asm 2: pxor  <xmm5=%xmm5,<xmm9=%xmm9
pxor  %xmm5,%xmm9

# qhasm:     xmm11 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#12
# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm11
pxor  %xmm4,%xmm11

# qhasm:     xmm12 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm12=int6464#13
# asm 2: pxor  <xmm6=%xmm6,<xmm12=%xmm12
pxor  %xmm6,%xmm12

# qhasm:     xmm13 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm13=int6464#14
# asm 2: pxor  <xmm3=%xmm3,<xmm13=%xmm13
pxor  %xmm3,%xmm13

# qhasm:     xmm11 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm11=int6464#12
# asm 2: pxor  <xmm5=%xmm5,<xmm11=%xmm11
pxor  %xmm5,%xmm11

# qhasm:     xmm14 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm14=int6464#15
# asm 2: pxor  <xmm7=%xmm7,<xmm14=%xmm14
pxor  %xmm7,%xmm14

# qhasm:     xmm15 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm15=int6464#16
# asm 2: pxor  <xmm2=%xmm2,<xmm15=%xmm15
pxor  %xmm2,%xmm15

# qhasm:     xmm12 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm12=int6464#13
# asm 2: pxor  <xmm5=%xmm5,<xmm12=%xmm12
pxor  %xmm5,%xmm12

# qhasm:     xmm0 = shuffle dwords of xmm0 by 0x4E
# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
pshufd $0x4E,%xmm0,%xmm0

# qhasm:     xmm1 = shuffle dwords of xmm1 by 0x4E
# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
pshufd $0x4E,%xmm1,%xmm1

# qhasm:     xmm4 = shuffle dwords of xmm4 by 0x4E
# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
pshufd $0x4E,%xmm4,%xmm4

# qhasm:     xmm6 = shuffle dwords of xmm6 by 0x4E
# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
pshufd $0x4E,%xmm6,%xmm6

# qhasm:     xmm3 = shuffle dwords of xmm3 by 0x4E
# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
pshufd $0x4E,%xmm3,%xmm3

# qhasm:     xmm7 = shuffle dwords of xmm7 by 0x4E
# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
pshufd $0x4E,%xmm7,%xmm7

# qhasm:     xmm2 = shuffle dwords of xmm2 by 0x4E
# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
pshufd $0x4E,%xmm2,%xmm2

# qhasm:     xmm5 = shuffle dwords of xmm5 by 0x4E
# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
pshufd $0x4E,%xmm5,%xmm5

# qhasm:     xmm8 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm8=int6464#9
# asm 2: pxor  <xmm0=%xmm0,<xmm8=%xmm8
pxor  %xmm0,%xmm8

# qhasm:     xmm9 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm9=int6464#10
# asm 2: pxor  <xmm1=%xmm1,<xmm9=%xmm9
pxor  %xmm1,%xmm9

# qhasm:     xmm10 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm10=int6464#11
# asm 2: pxor  <xmm4=%xmm4,<xmm10=%xmm10
pxor  %xmm4,%xmm10

# qhasm:     xmm11 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm11=int6464#12
# asm 2: pxor  <xmm6=%xmm6,<xmm11=%xmm11
pxor  %xmm6,%xmm11

# qhasm:     xmm12 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm12=int6464#13
# asm 2: pxor  <xmm3=%xmm3,<xmm12=%xmm12
pxor  %xmm3,%xmm12

# qhasm:     xmm13 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm13=int6464#14
# asm 2: pxor  <xmm7=%xmm7,<xmm13=%xmm13
pxor  %xmm7,%xmm13

# qhasm:     xmm14 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm14=int6464#15
# asm 2: pxor  <xmm2=%xmm2,<xmm14=%xmm14
pxor  %xmm2,%xmm14

# qhasm:     xmm15 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm15=int6464#16
# asm 2: pxor  <xmm5=%xmm5,<xmm15=%xmm15
pxor  %xmm5,%xmm15

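# AddRoundKey for the next round: offsets 896..1008 are exactly 128 bytes
# past the previous round's key material, consistent with one 128-byte
# bitsliced round key per round. Each slice is again followed by the SR
# byte shuffle (ShiftRows).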
# qhasm:     xmm8 ^= *(int128 *)(c + 896)
# asm 1: pxor 896(<c=int64#4),<xmm8=int6464#9
# asm 2: pxor 896(<c=%rcx),<xmm8=%xmm8
pxor 896(%rcx),%xmm8

# qhasm:     shuffle bytes of xmm8 by SR
# asm 1: pshufb SR,<xmm8=int6464#9
# asm 2: pshufb SR,<xmm8=%xmm8
pshufb SR,%xmm8

# qhasm:     xmm9 ^= *(int128 *)(c + 912)
# asm 1: pxor 912(<c=int64#4),<xmm9=int6464#10
# asm 2: pxor 912(<c=%rcx),<xmm9=%xmm9
pxor 912(%rcx),%xmm9

# qhasm:     shuffle bytes of xmm9 by SR
# asm 1: pshufb SR,<xmm9=int6464#10
# asm 2: pshufb SR,<xmm9=%xmm9
pshufb SR,%xmm9

# qhasm:     xmm10 ^= *(int128 *)(c + 928)
# asm 1: pxor 928(<c=int64#4),<xmm10=int6464#11
# asm 2: pxor 928(<c=%rcx),<xmm10=%xmm10
pxor 928(%rcx),%xmm10

# qhasm:     shuffle bytes of xmm10 by SR
# asm 1: pshufb SR,<xmm10=int6464#11
# asm 2: pshufb SR,<xmm10=%xmm10
pshufb SR,%xmm10

# qhasm:     xmm11 ^= *(int128 *)(c + 944)
# asm 1: pxor 944(<c=int64#4),<xmm11=int6464#12
# asm 2: pxor 944(<c=%rcx),<xmm11=%xmm11
pxor 944(%rcx),%xmm11

# qhasm:     shuffle bytes of xmm11 by SR
# asm 1: pshufb SR,<xmm11=int6464#12
# asm 2: pshufb SR,<xmm11=%xmm11
pshufb SR,%xmm11

# qhasm:     xmm12 ^= *(int128 *)(c + 960)
# asm 1: pxor 960(<c=int64#4),<xmm12=int6464#13
# asm 2: pxor 960(<c=%rcx),<xmm12=%xmm12
pxor 960(%rcx),%xmm12

# qhasm:     shuffle bytes of xmm12 by SR
# asm 1: pshufb SR,<xmm12=int6464#13
# asm 2: pshufb SR,<xmm12=%xmm12
pshufb SR,%xmm12

# qhasm:     xmm13 ^= *(int128 *)(c + 976)
# asm 1: pxor 976(<c=int64#4),<xmm13=int6464#14
# asm 2: pxor 976(<c=%rcx),<xmm13=%xmm13
pxor 976(%rcx),%xmm13

# qhasm:     shuffle bytes of xmm13 by SR
# asm 1: pshufb SR,<xmm13=int6464#14
# asm 2: pshufb SR,<xmm13=%xmm13
pshufb SR,%xmm13

# qhasm:     xmm14 ^= *(int128 *)(c + 992)
# asm 1: pxor 992(<c=int64#4),<xmm14=int6464#15
# asm 2: pxor 992(<c=%rcx),<xmm14=%xmm14
pxor 992(%rcx),%xmm14

# qhasm:     shuffle bytes of xmm14 by SR
# asm 1: pshufb SR,<xmm14=int6464#15
# asm 2: pshufb SR,<xmm14=%xmm14
pshufb SR,%xmm14

# qhasm:     xmm15 ^= *(int128 *)(c + 1008)
# asm 1: pxor 1008(<c=int64#4),<xmm15=int6464#16
# asm 2: pxor 1008(<c=%rcx),<xmm15=%xmm15
pxor 1008(%rcx),%xmm15

# qhasm:     shuffle bytes of xmm15 by SR
# asm 1: pshufb SR,<xmm15=int6464#16
# asm 2: pshufb SR,<xmm15=%xmm15
pshufb SR,%xmm15

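# SubBytes once more: the XOR run below is the top linear layer of the
# S-box circuit, this time operating on the xmm8..xmm15 slices.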
# qhasm:       xmm13 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm13=int6464#14
# asm 2: pxor  <xmm14=%xmm14,<xmm13=%xmm13
pxor  %xmm14,%xmm13

# qhasm:       xmm10 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm10=int6464#11
# asm 2: pxor  <xmm9=%xmm9,<xmm10=%xmm10
pxor  %xmm9,%xmm10

# qhasm:       xmm13 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm13=int6464#14
# asm 2: pxor  <xmm8=%xmm8,<xmm13=%xmm13
pxor  %xmm8,%xmm13

# qhasm:       xmm14 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm14=int6464#15
# asm 2: pxor  <xmm10=%xmm10,<xmm14=%xmm14
pxor  %xmm10,%xmm14

# qhasm:       xmm11 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm11=int6464#12
# asm 2: pxor  <xmm8=%xmm8,<xmm11=%xmm11
pxor  %xmm8,%xmm11

# qhasm:       xmm14 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm14=int6464#15
# asm 2: pxor  <xmm11=%xmm11,<xmm14=%xmm14
pxor  %xmm11,%xmm14

# qhasm:       xmm11 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm11=int6464#12
# asm 2: pxor  <xmm15=%xmm15,<xmm11=%xmm11
pxor  %xmm15,%xmm11

# qhasm:       xmm11 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm11=int6464#12
# asm 2: pxor  <xmm12=%xmm12,<xmm11=%xmm11
pxor  %xmm12,%xmm11

# qhasm:       xmm15 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm15=int6464#16
# asm 2: pxor  <xmm13=%xmm13,<xmm15=%xmm15
pxor  %xmm13,%xmm15

# qhasm:       xmm11 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm11=int6464#12
# asm 2: pxor  <xmm9=%xmm9,<xmm11=%xmm11
pxor  %xmm9,%xmm11

# qhasm:       xmm12 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm12=int6464#13
# asm 2: pxor  <xmm13=%xmm13,<xmm12=%xmm12
pxor  %xmm13,%xmm12

# qhasm:       xmm10 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm10=int6464#11
# asm 2: pxor  <xmm15=%xmm15,<xmm10=%xmm10
pxor  %xmm15,%xmm10

# qhasm:       xmm9 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm9=int6464#10
# asm 2: pxor  <xmm13=%xmm13,<xmm9=%xmm9
pxor  %xmm13,%xmm9

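# Start of the nonlinear middle section of this round's S-box evaluation;
# the qhasm names again diverge from the physical registers from here on.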
# qhasm:       xmm3 = xmm15
# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
movdqa %xmm15,%xmm0

# qhasm:       xmm2 = xmm9
# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
movdqa %xmm9,%xmm1

# qhasm:       xmm1 = xmm13
# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
movdqa %xmm13,%xmm2

# qhasm:       xmm5 = xmm10
# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
movdqa %xmm10,%xmm3

# qhasm:       xmm4 = xmm14
# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
movdqa %xmm14,%xmm4

# qhasm:       xmm3 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#1
# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm0
pxor  %xmm12,%xmm0

# qhasm:       xmm2 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm2=int6464#2
# asm 2: pxor  <xmm10=%xmm10,<xmm2=%xmm1
pxor  %xmm10,%xmm1

# qhasm:       xmm1 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm1=int6464#3
# asm 2: pxor  <xmm11=%xmm11,<xmm1=%xmm2
pxor  %xmm11,%xmm2

# qhasm:       xmm5 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm5=int6464#4
# asm 2: pxor  <xmm12=%xmm12,<xmm5=%xmm3
pxor  %xmm12,%xmm3

# qhasm:       xmm4 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#5
# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm4
pxor  %xmm8,%xmm4

# qhasm:       xmm6 = xmm3
# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
movdqa %xmm0,%xmm5

# qhasm:       xmm0 = xmm2
# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
movdqa %xmm1,%xmm6

# qhasm:       xmm7 = xmm3
# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
movdqa %xmm0,%xmm7

# qhasm:       xmm2 |= xmm1
# asm 1: por   <xmm1=int6464#3,<xmm2=int6464#2
# asm 2: por   <xmm1=%xmm2,<xmm2=%xmm1
por   %xmm2,%xmm1

# qhasm:       xmm3 |= xmm4
# asm 1: por   <xmm4=int6464#5,<xmm3=int6464#1
# asm 2: por   <xmm4=%xmm4,<xmm3=%xmm0
por   %xmm4,%xmm0

# qhasm:       xmm7 ^= xmm0
# asm 1: pxor  <xmm0=int6464#7,<xmm7=int6464#8
# asm 2: pxor  <xmm0=%xmm6,<xmm7=%xmm7
pxor  %xmm6,%xmm7

# qhasm:       xmm6 &= xmm4
# asm 1: pand  <xmm4=int6464#5,<xmm6=int6464#6
# asm 2: pand  <xmm4=%xmm4,<xmm6=%xmm5
pand  %xmm4,%xmm5

# qhasm:       xmm0 &= xmm1
# asm 1: pand  <xmm1=int6464#3,<xmm0=int6464#7
# asm 2: pand  <xmm1=%xmm2,<xmm0=%xmm6
pand  %xmm2,%xmm6

# qhasm:       xmm4 ^= xmm1
# asm 1: pxor  <xmm1=int6464#3,<xmm4=int6464#5
# asm 2: pxor  <xmm1=%xmm2,<xmm4=%xmm4
pxor  %xmm2,%xmm4

# qhasm:       xmm7 &= xmm4
# asm 1: pand  <xmm4=int6464#5,<xmm7=int6464#8
# asm 2: pand  <xmm4=%xmm4,<xmm7=%xmm7
pand  %xmm4,%xmm7

# qhasm:       xmm4 = xmm11
# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
movdqa %xmm11,%xmm2

# qhasm:       xmm4 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#3
# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm2
pxor  %xmm8,%xmm2

# qhasm:       xmm5 &= xmm4
# asm 1: pand  <xmm4=int6464#3,<xmm5=int6464#4
# asm 2: pand  <xmm4=%xmm2,<xmm5=%xmm3
pand  %xmm2,%xmm3

# qhasm:       xmm3 ^= xmm5
# asm 1: pxor  <xmm5=int6464#4,<xmm3=int6464#1
# asm 2: pxor  <xmm5=%xmm3,<xmm3=%xmm0
pxor  %xmm3,%xmm0

# qhasm:       xmm2 ^= xmm5
# asm 1: pxor  <xmm5=int6464#4,<xmm2=int6464#2
# asm 2: pxor  <xmm5=%xmm3,<xmm2=%xmm1
pxor  %xmm3,%xmm1

# qhasm:       xmm5 = xmm15
# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
movdqa %xmm15,%xmm2

# qhasm:       xmm5 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm5=int6464#3
# asm 2: pxor  <xmm9=%xmm9,<xmm5=%xmm2
pxor  %xmm9,%xmm2

# qhasm:       xmm4 = xmm13
# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
movdqa %xmm13,%xmm3

# qhasm:       xmm1 = xmm5
# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
movdqa %xmm2,%xmm4

# qhasm:       xmm4 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm4=int6464#4
# asm 2: pxor  <xmm14=%xmm14,<xmm4=%xmm3
pxor  %xmm14,%xmm3

# qhasm:       xmm1 |= xmm4
# asm 1: por   <xmm4=int6464#4,<xmm1=int6464#5
# asm 2: por   <xmm4=%xmm3,<xmm1=%xmm4
por   %xmm3,%xmm4

# qhasm:       xmm5 &= xmm4
# asm 1: pand  <xmm4=int6464#4,<xmm5=int6464#3
# asm 2: pand  <xmm4=%xmm3,<xmm5=%xmm2
pand  %xmm3,%xmm2

# qhasm:       xmm0 ^= xmm5
# asm 1: pxor  <xmm5=int6464#3,<xmm0=int6464#7
# asm 2: pxor  <xmm5=%xmm2,<xmm0=%xmm6
pxor  %xmm2,%xmm6

# qhasm:       xmm3 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm3=int6464#1
# asm 2: pxor  <xmm7=%xmm7,<xmm3=%xmm0
pxor  %xmm7,%xmm0

# qhasm:       xmm2 ^= xmm6
# asm 1: pxor  <xmm6=int6464#6,<xmm2=int6464#2
# asm 2: pxor  <xmm6=%xmm5,<xmm2=%xmm1
pxor  %xmm5,%xmm1

# qhasm:       xmm1 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm1=int6464#5
# asm 2: pxor  <xmm7=%xmm7,<xmm1=%xmm4
pxor  %xmm7,%xmm4

# qhasm:       xmm0 ^= xmm6
# asm 1: pxor  <xmm6=int6464#6,<xmm0=int6464#7
# asm 2: pxor  <xmm6=%xmm5,<xmm0=%xmm6
pxor  %xmm5,%xmm6

# qhasm:       xmm1 ^= xmm6
# asm 1: pxor  <xmm6=int6464#6,<xmm1=int6464#5
# asm 2: pxor  <xmm6=%xmm5,<xmm1=%xmm4
pxor  %xmm5,%xmm4

# qhasm:       xmm4 = xmm10
# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
movdqa %xmm10,%xmm2

# qhasm:       xmm5 = xmm12
# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
movdqa %xmm12,%xmm3

# qhasm:       xmm6 = xmm9
# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
movdqa %xmm9,%xmm5

# qhasm:       xmm7 = xmm15
# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
movdqa %xmm15,%xmm7

# qhasm:       xmm4 &= xmm11
# asm 1: pand  <xmm11=int6464#12,<xmm4=int6464#3
# asm 2: pand  <xmm11=%xmm11,<xmm4=%xmm2
pand  %xmm11,%xmm2

# qhasm:       xmm5 &= xmm8
# asm 1: pand  <xmm8=int6464#9,<xmm5=int6464#4
# asm 2: pand  <xmm8=%xmm8,<xmm5=%xmm3
pand  %xmm8,%xmm3

# qhasm:       xmm6 &= xmm13
# asm 1: pand  <xmm13=int6464#14,<xmm6=int6464#6
# asm 2: pand  <xmm13=%xmm13,<xmm6=%xmm5
pand  %xmm13,%xmm5

# qhasm:       xmm7 |= xmm14
# asm 1: por   <xmm14=int6464#15,<xmm7=int6464#8
# asm 2: por   <xmm14=%xmm14,<xmm7=%xmm7
por   %xmm14,%xmm7

# qhasm:       xmm3 ^= xmm4
# asm 1: pxor  <xmm4=int6464#3,<xmm3=int6464#1
# asm 2: pxor  <xmm4=%xmm2,<xmm3=%xmm0
pxor  %xmm2,%xmm0

# qhasm:       xmm2 ^= xmm5
# asm 1: pxor  <xmm5=int6464#4,<xmm2=int6464#2
# asm 2: pxor  <xmm5=%xmm3,<xmm2=%xmm1
pxor  %xmm3,%xmm1

# qhasm:       xmm1 ^= xmm6
# asm 1: pxor  <xmm6=int6464#6,<xmm1=int6464#5
# asm 2: pxor  <xmm6=%xmm5,<xmm1=%xmm4
pxor  %xmm5,%xmm4

# qhasm:       xmm0 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm0=int6464#7
# asm 2: pxor  <xmm7=%xmm7,<xmm0=%xmm6
pxor  %xmm7,%xmm6

# qhasm:       xmm4 = xmm3
# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
movdqa %xmm0,%xmm2

# qhasm:       xmm4 ^= xmm2
# asm 1: pxor  <xmm2=int6464#2,<xmm4=int6464#3
# asm 2: pxor  <xmm2=%xmm1,<xmm4=%xmm2
pxor  %xmm1,%xmm2

# qhasm:       xmm3 &= xmm1
# asm 1: pand  <xmm1=int6464#5,<xmm3=int6464#1
# asm 2: pand  <xmm1=%xmm4,<xmm3=%xmm0
pand  %xmm4,%xmm0

# qhasm:       xmm6 = xmm0
# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
movdqa %xmm6,%xmm3

# qhasm:       xmm6 ^= xmm3
# asm 1: pxor  <xmm3=int6464#1,<xmm6=int6464#4
# asm 2: pxor  <xmm3=%xmm0,<xmm6=%xmm3
pxor  %xmm0,%xmm3

# qhasm:       xmm7 = xmm4
# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
movdqa %xmm2,%xmm5

# qhasm:       xmm7 &= xmm6
# asm 1: pand  <xmm6=int6464#4,<xmm7=int6464#6
# asm 2: pand  <xmm6=%xmm3,<xmm7=%xmm5
pand  %xmm3,%xmm5

# qhasm:       xmm7 ^= xmm2
# asm 1: pxor  <xmm2=int6464#2,<xmm7=int6464#6
# asm 2: pxor  <xmm2=%xmm1,<xmm7=%xmm5
pxor  %xmm1,%xmm5

# qhasm:       xmm5 = xmm1
# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
movdqa %xmm4,%xmm7

# qhasm:       xmm5 ^= xmm0
# asm 1: pxor  <xmm0=int6464#7,<xmm5=int6464#8
# asm 2: pxor  <xmm0=%xmm6,<xmm5=%xmm7
pxor  %xmm6,%xmm7

# qhasm:       xmm3 ^= xmm2
# asm 1: pxor  <xmm2=int6464#2,<xmm3=int6464#1
# asm 2: pxor  <xmm2=%xmm1,<xmm3=%xmm0
pxor  %xmm1,%xmm0

# qhasm:       xmm5 &= xmm3
# asm 1: pand  <xmm3=int6464#1,<xmm5=int6464#8
# asm 2: pand  <xmm3=%xmm0,<xmm5=%xmm7
pand  %xmm0,%xmm7

# qhasm:       xmm5 ^= xmm0
# asm 1: pxor  <xmm0=int6464#7,<xmm5=int6464#8
# asm 2: pxor  <xmm0=%xmm6,<xmm5=%xmm7
pxor  %xmm6,%xmm7

# qhasm:       xmm1 ^= xmm5
# asm 1: pxor  <xmm5=int6464#8,<xmm1=int6464#5
# asm 2: pxor  <xmm5=%xmm7,<xmm1=%xmm4
pxor  %xmm7,%xmm4

# qhasm:       xmm2 = xmm6
# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
movdqa %xmm3,%xmm0

# qhasm:       xmm2 ^= xmm5
# asm 1: pxor  <xmm5=int6464#8,<xmm2=int6464#1
# asm 2: pxor  <xmm5=%xmm7,<xmm2=%xmm0
pxor  %xmm7,%xmm0

# qhasm:       xmm2 &= xmm0
# asm 1: pand  <xmm0=int6464#7,<xmm2=int6464#1
# asm 2: pand  <xmm0=%xmm6,<xmm2=%xmm0
pand  %xmm6,%xmm0

# qhasm:       xmm1 ^= xmm2
# asm 1: pxor  <xmm2=int6464#1,<xmm1=int6464#5
# asm 2: pxor  <xmm2=%xmm0,<xmm1=%xmm4
pxor  %xmm0,%xmm4

# qhasm:       xmm6 ^= xmm2
# asm 1: pxor  <xmm2=int6464#1,<xmm6=int6464#4
# asm 2: pxor  <xmm2=%xmm0,<xmm6=%xmm3
pxor  %xmm0,%xmm3

# qhasm:       xmm6 &= xmm7
# asm 1: pand  <xmm7=int6464#6,<xmm6=int6464#4
# asm 2: pand  <xmm7=%xmm5,<xmm6=%xmm3
pand  %xmm5,%xmm3

# qhasm:       xmm6 ^= xmm4
# asm 1: pxor  <xmm4=int6464#3,<xmm6=int6464#4
# asm 2: pxor  <xmm4=%xmm2,<xmm6=%xmm3
pxor  %xmm2,%xmm3

# qhasm:         xmm4 = xmm14
# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
movdqa %xmm14,%xmm0

# qhasm:         xmm0 = xmm13
# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
movdqa %xmm13,%xmm1

# qhasm:           xmm2 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
movdqa %xmm5,%xmm2

# qhasm:           xmm2 ^= xmm6
# asm 1: pxor  <xmm6=int6464#4,<xmm2=int6464#3
# asm 2: pxor  <xmm6=%xmm3,<xmm2=%xmm2
pxor  %xmm3,%xmm2

# qhasm:           xmm2 &= xmm14
# asm 1: pand  <xmm14=int6464#15,<xmm2=int6464#3
# asm 2: pand  <xmm14=%xmm14,<xmm2=%xmm2
pand  %xmm14,%xmm2

# qhasm:           xmm14 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm14=int6464#15
# asm 2: pxor  <xmm13=%xmm13,<xmm14=%xmm14
pxor  %xmm13,%xmm14

# qhasm:           xmm14 &= xmm6
# asm 1: pand  <xmm6=int6464#4,<xmm14=int6464#15
# asm 2: pand  <xmm6=%xmm3,<xmm14=%xmm14
pand  %xmm3,%xmm14

# qhasm:           xmm13 &= xmm7
# asm 1: pand  <xmm7=int6464#6,<xmm13=int6464#14
# asm 2: pand  <xmm7=%xmm5,<xmm13=%xmm13
pand  %xmm5,%xmm13

# qhasm:           xmm14 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm14=int6464#15
# asm 2: pxor  <xmm13=%xmm13,<xmm14=%xmm14
pxor  %xmm13,%xmm14

# qhasm:           xmm13 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm13=int6464#14
# asm 2: pxor  <xmm2=%xmm2,<xmm13=%xmm13
pxor  %xmm2,%xmm13

# qhasm:         xmm4 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#1
# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm0
pxor  %xmm8,%xmm0

# qhasm:         xmm0 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm0=int6464#2
# asm 2: pxor  <xmm11=%xmm11,<xmm0=%xmm1
pxor  %xmm11,%xmm1

# qhasm:         xmm7 ^= xmm5
# asm 1: pxor  <xmm5=int6464#8,<xmm7=int6464#6
# asm 2: pxor  <xmm5=%xmm7,<xmm7=%xmm5
pxor  %xmm7,%xmm5

# qhasm:         xmm6 ^= xmm1
# asm 1: pxor  <xmm1=int6464#5,<xmm6=int6464#4
# asm 2: pxor  <xmm1=%xmm4,<xmm6=%xmm3
pxor  %xmm4,%xmm3

# qhasm:           xmm3 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
movdqa %xmm5,%xmm2
9014
9015# qhasm:           xmm3 ^= xmm6
9016# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
9017# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
9018pxor  %xmm3,%xmm2
9019
9020# qhasm:           xmm3 &= xmm4
9021# asm 1: pand  <xmm4=int6464#1,<xmm3=int6464#3
9022# asm 2: pand  <xmm4=%xmm0,<xmm3=%xmm2
9023pand  %xmm0,%xmm2
9024
9025# qhasm:           xmm4 ^= xmm0
9026# asm 1: pxor  <xmm0=int6464#2,<xmm4=int6464#1
9027# asm 2: pxor  <xmm0=%xmm1,<xmm4=%xmm0
9028pxor  %xmm1,%xmm0
9029
9030# qhasm:           xmm4 &= xmm6
9031# asm 1: pand  <xmm6=int6464#4,<xmm4=int6464#1
9032# asm 2: pand  <xmm6=%xmm3,<xmm4=%xmm0
9033pand  %xmm3,%xmm0
9034
9035# qhasm:           xmm0 &= xmm7
9036# asm 1: pand  <xmm7=int6464#6,<xmm0=int6464#2
9037# asm 2: pand  <xmm7=%xmm5,<xmm0=%xmm1
9038pand  %xmm5,%xmm1
9039
9040# qhasm:           xmm0 ^= xmm4
9041# asm 1: pxor  <xmm4=int6464#1,<xmm0=int6464#2
9042# asm 2: pxor  <xmm4=%xmm0,<xmm0=%xmm1
9043pxor  %xmm0,%xmm1
9044
9045# qhasm:           xmm4 ^= xmm3
9046# asm 1: pxor  <xmm3=int6464#3,<xmm4=int6464#1
9047# asm 2: pxor  <xmm3=%xmm2,<xmm4=%xmm0
9048pxor  %xmm2,%xmm0
9049
9050# qhasm:           xmm2 = xmm5
9051# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
9052# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
9053movdqa %xmm7,%xmm2
9054
9055# qhasm:           xmm2 ^= xmm1
9056# asm 1: pxor  <xmm1=int6464#5,<xmm2=int6464#3
9057# asm 2: pxor  <xmm1=%xmm4,<xmm2=%xmm2
9058pxor  %xmm4,%xmm2
9059
9060# qhasm:           xmm2 &= xmm8
9061# asm 1: pand  <xmm8=int6464#9,<xmm2=int6464#3
9062# asm 2: pand  <xmm8=%xmm8,<xmm2=%xmm2
9063pand  %xmm8,%xmm2
9064
9065# qhasm:           xmm8 ^= xmm11
9066# asm 1: pxor  <xmm11=int6464#12,<xmm8=int6464#9
9067# asm 2: pxor  <xmm11=%xmm11,<xmm8=%xmm8
9068pxor  %xmm11,%xmm8
9069
9070# qhasm:           xmm8 &= xmm1
9071# asm 1: pand  <xmm1=int6464#5,<xmm8=int6464#9
9072# asm 2: pand  <xmm1=%xmm4,<xmm8=%xmm8
9073pand  %xmm4,%xmm8
9074
9075# qhasm:           xmm11 &= xmm5
9076# asm 1: pand  <xmm5=int6464#8,<xmm11=int6464#12
9077# asm 2: pand  <xmm5=%xmm7,<xmm11=%xmm11
9078pand  %xmm7,%xmm11
9079
9080# qhasm:           xmm8 ^= xmm11
9081# asm 1: pxor  <xmm11=int6464#12,<xmm8=int6464#9
9082# asm 2: pxor  <xmm11=%xmm11,<xmm8=%xmm8
9083pxor  %xmm11,%xmm8
9084
9085# qhasm:           xmm11 ^= xmm2
9086# asm 1: pxor  <xmm2=int6464#3,<xmm11=int6464#12
9087# asm 2: pxor  <xmm2=%xmm2,<xmm11=%xmm11
9088pxor  %xmm2,%xmm11
9089
9090# qhasm:         xmm14 ^= xmm4
9091# asm 1: pxor  <xmm4=int6464#1,<xmm14=int6464#15
9092# asm 2: pxor  <xmm4=%xmm0,<xmm14=%xmm14
9093pxor  %xmm0,%xmm14
9094
9095# qhasm:         xmm8 ^= xmm4
9096# asm 1: pxor  <xmm4=int6464#1,<xmm8=int6464#9
9097# asm 2: pxor  <xmm4=%xmm0,<xmm8=%xmm8
9098pxor  %xmm0,%xmm8
9099
9100# qhasm:         xmm13 ^= xmm0
9101# asm 1: pxor  <xmm0=int6464#2,<xmm13=int6464#14
9102# asm 2: pxor  <xmm0=%xmm1,<xmm13=%xmm13
9103pxor  %xmm1,%xmm13
9104
9105# qhasm:         xmm11 ^= xmm0
9106# asm 1: pxor  <xmm0=int6464#2,<xmm11=int6464#12
9107# asm 2: pxor  <xmm0=%xmm1,<xmm11=%xmm11
9108pxor  %xmm1,%xmm11
9109
9110# qhasm:         xmm4 = xmm15
9111# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
9112# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
9113movdqa %xmm15,%xmm0
9114
9115# qhasm:         xmm0 = xmm9
9116# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
9117# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
9118movdqa %xmm9,%xmm1
9119
9120# qhasm:         xmm4 ^= xmm12
9121# asm 1: pxor  <xmm12=int6464#13,<xmm4=int6464#1
9122# asm 2: pxor  <xmm12=%xmm12,<xmm4=%xmm0
9123pxor  %xmm12,%xmm0
9124
9125# qhasm:         xmm0 ^= xmm10
9126# asm 1: pxor  <xmm10=int6464#11,<xmm0=int6464#2
9127# asm 2: pxor  <xmm10=%xmm10,<xmm0=%xmm1
9128pxor  %xmm10,%xmm1
9129
9130# qhasm:           xmm3 = xmm7
9131# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
9132# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
9133movdqa %xmm5,%xmm2
9134
9135# qhasm:           xmm3 ^= xmm6
9136# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
9137# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
9138pxor  %xmm3,%xmm2
9139
9140# qhasm:           xmm3 &= xmm4
9141# asm 1: pand  <xmm4=int6464#1,<xmm3=int6464#3
9142# asm 2: pand  <xmm4=%xmm0,<xmm3=%xmm2
9143pand  %xmm0,%xmm2
9144
9145# qhasm:           xmm4 ^= xmm0
9146# asm 1: pxor  <xmm0=int6464#2,<xmm4=int6464#1
9147# asm 2: pxor  <xmm0=%xmm1,<xmm4=%xmm0
9148pxor  %xmm1,%xmm0
9149
9150# qhasm:           xmm4 &= xmm6
9151# asm 1: pand  <xmm6=int6464#4,<xmm4=int6464#1
9152# asm 2: pand  <xmm6=%xmm3,<xmm4=%xmm0
9153pand  %xmm3,%xmm0
9154
9155# qhasm:           xmm0 &= xmm7
9156# asm 1: pand  <xmm7=int6464#6,<xmm0=int6464#2
9157# asm 2: pand  <xmm7=%xmm5,<xmm0=%xmm1
9158pand  %xmm5,%xmm1
9159
9160# qhasm:           xmm0 ^= xmm4
9161# asm 1: pxor  <xmm4=int6464#1,<xmm0=int6464#2
9162# asm 2: pxor  <xmm4=%xmm0,<xmm0=%xmm1
9163pxor  %xmm0,%xmm1
9164
9165# qhasm:           xmm4 ^= xmm3
9166# asm 1: pxor  <xmm3=int6464#3,<xmm4=int6464#1
9167# asm 2: pxor  <xmm3=%xmm2,<xmm4=%xmm0
9168pxor  %xmm2,%xmm0
9169
9170# qhasm:           xmm2 = xmm5
9171# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
9172# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
9173movdqa %xmm7,%xmm2
9174
9175# qhasm:           xmm2 ^= xmm1
9176# asm 1: pxor  <xmm1=int6464#5,<xmm2=int6464#3
9177# asm 2: pxor  <xmm1=%xmm4,<xmm2=%xmm2
9178pxor  %xmm4,%xmm2
9179
9180# qhasm:           xmm2 &= xmm12
9181# asm 1: pand  <xmm12=int6464#13,<xmm2=int6464#3
9182# asm 2: pand  <xmm12=%xmm12,<xmm2=%xmm2
9183pand  %xmm12,%xmm2
9184
9185# qhasm:           xmm12 ^= xmm10
9186# asm 1: pxor  <xmm10=int6464#11,<xmm12=int6464#13
9187# asm 2: pxor  <xmm10=%xmm10,<xmm12=%xmm12
9188pxor  %xmm10,%xmm12
9189
9190# qhasm:           xmm12 &= xmm1
9191# asm 1: pand  <xmm1=int6464#5,<xmm12=int6464#13
9192# asm 2: pand  <xmm1=%xmm4,<xmm12=%xmm12
9193pand  %xmm4,%xmm12
9194
9195# qhasm:           xmm10 &= xmm5
9196# asm 1: pand  <xmm5=int6464#8,<xmm10=int6464#11
9197# asm 2: pand  <xmm5=%xmm7,<xmm10=%xmm10
9198pand  %xmm7,%xmm10
9199
9200# qhasm:           xmm12 ^= xmm10
9201# asm 1: pxor  <xmm10=int6464#11,<xmm12=int6464#13
9202# asm 2: pxor  <xmm10=%xmm10,<xmm12=%xmm12
9203pxor  %xmm10,%xmm12
9204
9205# qhasm:           xmm10 ^= xmm2
9206# asm 1: pxor  <xmm2=int6464#3,<xmm10=int6464#11
9207# asm 2: pxor  <xmm2=%xmm2,<xmm10=%xmm10
9208pxor  %xmm2,%xmm10
9209
9210# qhasm:         xmm7 ^= xmm5
9211# asm 1: pxor  <xmm5=int6464#8,<xmm7=int6464#6
9212# asm 2: pxor  <xmm5=%xmm7,<xmm7=%xmm5
9213pxor  %xmm7,%xmm5
9214
9215# qhasm:         xmm6 ^= xmm1
9216# asm 1: pxor  <xmm1=int6464#5,<xmm6=int6464#4
9217# asm 2: pxor  <xmm1=%xmm4,<xmm6=%xmm3
9218pxor  %xmm4,%xmm3
9219
9220# qhasm:           xmm3 = xmm7
9221# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
9222# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
9223movdqa %xmm5,%xmm2
9224
9225# qhasm:           xmm3 ^= xmm6
9226# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
9227# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
9228pxor  %xmm3,%xmm2
9229
9230# qhasm:           xmm3 &= xmm15
9231# asm 1: pand  <xmm15=int6464#16,<xmm3=int6464#3
9232# asm 2: pand  <xmm15=%xmm15,<xmm3=%xmm2
9233pand  %xmm15,%xmm2
9234
9235# qhasm:           xmm15 ^= xmm9
9236# asm 1: pxor  <xmm9=int6464#10,<xmm15=int6464#16
9237# asm 2: pxor  <xmm9=%xmm9,<xmm15=%xmm15
9238pxor  %xmm9,%xmm15
9239
9240# qhasm:           xmm15 &= xmm6
9241# asm 1: pand  <xmm6=int6464#4,<xmm15=int6464#16
9242# asm 2: pand  <xmm6=%xmm3,<xmm15=%xmm15
9243pand  %xmm3,%xmm15
9244
9245# qhasm:           xmm9 &= xmm7
9246# asm 1: pand  <xmm7=int6464#6,<xmm9=int6464#10
9247# asm 2: pand  <xmm7=%xmm5,<xmm9=%xmm9
9248pand  %xmm5,%xmm9
9249
9250# qhasm:           xmm15 ^= xmm9
9251# asm 1: pxor  <xmm9=int6464#10,<xmm15=int6464#16
9252# asm 2: pxor  <xmm9=%xmm9,<xmm15=%xmm15
9253pxor  %xmm9,%xmm15
9254
9255# qhasm:           xmm9 ^= xmm3
9256# asm 1: pxor  <xmm3=int6464#3,<xmm9=int6464#10
9257# asm 2: pxor  <xmm3=%xmm2,<xmm9=%xmm9
9258pxor  %xmm2,%xmm9
9259
9260# qhasm:         xmm15 ^= xmm4
9261# asm 1: pxor  <xmm4=int6464#1,<xmm15=int6464#16
9262# asm 2: pxor  <xmm4=%xmm0,<xmm15=%xmm15
9263pxor  %xmm0,%xmm15
9264
9265# qhasm:         xmm12 ^= xmm4
9266# asm 1: pxor  <xmm4=int6464#1,<xmm12=int6464#13
9267# asm 2: pxor  <xmm4=%xmm0,<xmm12=%xmm12
9268pxor  %xmm0,%xmm12
9269
9270# qhasm:         xmm9 ^= xmm0
9271# asm 1: pxor  <xmm0=int6464#2,<xmm9=int6464#10
9272# asm 2: pxor  <xmm0=%xmm1,<xmm9=%xmm9
9273pxor  %xmm1,%xmm9
9274
9275# qhasm:         xmm10 ^= xmm0
9276# asm 1: pxor  <xmm0=int6464#2,<xmm10=int6464#11
9277# asm 2: pxor  <xmm0=%xmm1,<xmm10=%xmm10
9278pxor  %xmm1,%xmm10
9279
9280# qhasm:       xmm15 ^= xmm8
9281# asm 1: pxor  <xmm8=int6464#9,<xmm15=int6464#16
9282# asm 2: pxor  <xmm8=%xmm8,<xmm15=%xmm15
9283pxor  %xmm8,%xmm15
9284
9285# qhasm:       xmm9 ^= xmm14
9286# asm 1: pxor  <xmm14=int6464#15,<xmm9=int6464#10
9287# asm 2: pxor  <xmm14=%xmm14,<xmm9=%xmm9
9288pxor  %xmm14,%xmm9
9289
9290# qhasm:       xmm12 ^= xmm15
9291# asm 1: pxor  <xmm15=int6464#16,<xmm12=int6464#13
9292# asm 2: pxor  <xmm15=%xmm15,<xmm12=%xmm12
9293pxor  %xmm15,%xmm12
9294
9295# qhasm:       xmm14 ^= xmm8
9296# asm 1: pxor  <xmm8=int6464#9,<xmm14=int6464#15
9297# asm 2: pxor  <xmm8=%xmm8,<xmm14=%xmm14
9298pxor  %xmm8,%xmm14
9299
9300# qhasm:       xmm8 ^= xmm9
9301# asm 1: pxor  <xmm9=int6464#10,<xmm8=int6464#9
9302# asm 2: pxor  <xmm9=%xmm9,<xmm8=%xmm8
9303pxor  %xmm9,%xmm8
9304
9305# qhasm:       xmm9 ^= xmm13
9306# asm 1: pxor  <xmm13=int6464#14,<xmm9=int6464#10
9307# asm 2: pxor  <xmm13=%xmm13,<xmm9=%xmm9
9308pxor  %xmm13,%xmm9
9309
9310# qhasm:       xmm13 ^= xmm10
9311# asm 1: pxor  <xmm10=int6464#11,<xmm13=int6464#14
9312# asm 2: pxor  <xmm10=%xmm10,<xmm13=%xmm13
9313pxor  %xmm10,%xmm13
9314
9315# qhasm:       xmm12 ^= xmm13
9316# asm 1: pxor  <xmm13=int6464#14,<xmm12=int6464#13
9317# asm 2: pxor  <xmm13=%xmm13,<xmm12=%xmm12
9318pxor  %xmm13,%xmm12
9319
9320# qhasm:       xmm10 ^= xmm11
9321# asm 1: pxor  <xmm11=int6464#12,<xmm10=int6464#11
9322# asm 2: pxor  <xmm11=%xmm11,<xmm10=%xmm10
9323pxor  %xmm11,%xmm10
9324
9325# qhasm:       xmm11 ^= xmm13
9326# asm 1: pxor  <xmm13=int6464#14,<xmm11=int6464#12
9327# asm 2: pxor  <xmm13=%xmm13,<xmm11=%xmm11
9328pxor  %xmm13,%xmm11
9329
9330# qhasm:       xmm14 ^= xmm11
9331# asm 1: pxor  <xmm11=int6464#12,<xmm14=int6464#15
9332# asm 2: pxor  <xmm11=%xmm11,<xmm14=%xmm14
9333pxor  %xmm11,%xmm14
9334
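# note (added annotation, not qhasm output): the stanzas above finish the
# bitsliced S-box for this round.  Below, each state word is XORed with
# copies of itself permuted by pshufd: $0x93 rotates the four 32-bit lanes
# by one position and $0x4E swaps the 64-bit halves.  This XOR-of-rotations
# network appears to implement the MixColumns-style linear mixing on all
# eight bit slices at once.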
# qhasm:     xmm0 = shuffle dwords of xmm8 by 0x93
# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
pshufd $0x93,%xmm8,%xmm0

# qhasm:     xmm1 = shuffle dwords of xmm9 by 0x93
# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
pshufd $0x93,%xmm9,%xmm1

# qhasm:     xmm2 = shuffle dwords of xmm12 by 0x93
# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
pshufd $0x93,%xmm12,%xmm2

# qhasm:     xmm3 = shuffle dwords of xmm14 by 0x93
# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
pshufd $0x93,%xmm14,%xmm3

# qhasm:     xmm4 = shuffle dwords of xmm11 by 0x93
# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
pshufd $0x93,%xmm11,%xmm4

# qhasm:     xmm5 = shuffle dwords of xmm15 by 0x93
# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
pshufd $0x93,%xmm15,%xmm5

# qhasm:     xmm6 = shuffle dwords of xmm10 by 0x93
# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
pshufd $0x93,%xmm10,%xmm6

# qhasm:     xmm7 = shuffle dwords of xmm13 by 0x93
# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
pshufd $0x93,%xmm13,%xmm7

# qhasm:     xmm8 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm8=int6464#9
# asm 2: pxor  <xmm0=%xmm0,<xmm8=%xmm8
pxor  %xmm0,%xmm8

# qhasm:     xmm9 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm9=int6464#10
# asm 2: pxor  <xmm1=%xmm1,<xmm9=%xmm9
pxor  %xmm1,%xmm9

# qhasm:     xmm12 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm12=int6464#13
# asm 2: pxor  <xmm2=%xmm2,<xmm12=%xmm12
pxor  %xmm2,%xmm12

# qhasm:     xmm14 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm14=int6464#15
# asm 2: pxor  <xmm3=%xmm3,<xmm14=%xmm14
pxor  %xmm3,%xmm14

# qhasm:     xmm11 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#12
# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm11
pxor  %xmm4,%xmm11

# qhasm:     xmm15 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm15=int6464#16
# asm 2: pxor  <xmm5=%xmm5,<xmm15=%xmm15
pxor  %xmm5,%xmm15

# qhasm:     xmm10 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm10=int6464#11
# asm 2: pxor  <xmm6=%xmm6,<xmm10=%xmm10
pxor  %xmm6,%xmm10

# qhasm:     xmm13 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm13=int6464#14
# asm 2: pxor  <xmm7=%xmm7,<xmm13=%xmm13
pxor  %xmm7,%xmm13

# qhasm:     xmm0 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm0=int6464#1
# asm 2: pxor  <xmm13=%xmm13,<xmm0=%xmm0
pxor  %xmm13,%xmm0

# qhasm:     xmm1 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm1=int6464#2
# asm 2: pxor  <xmm8=%xmm8,<xmm1=%xmm1
pxor  %xmm8,%xmm1

# qhasm:     xmm2 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm2=int6464#3
# asm 2: pxor  <xmm9=%xmm9,<xmm2=%xmm2
pxor  %xmm9,%xmm2

# qhasm:     xmm1 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm1=int6464#2
# asm 2: pxor  <xmm13=%xmm13,<xmm1=%xmm1
pxor  %xmm13,%xmm1

# qhasm:     xmm3 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#4
# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm3
pxor  %xmm12,%xmm3

# qhasm:     xmm4 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm4=int6464#5
# asm 2: pxor  <xmm14=%xmm14,<xmm4=%xmm4
pxor  %xmm14,%xmm4

# qhasm:     xmm5 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm5=int6464#6
# asm 2: pxor  <xmm11=%xmm11,<xmm5=%xmm5
pxor  %xmm11,%xmm5

# qhasm:     xmm3 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm3=int6464#4
# asm 2: pxor  <xmm13=%xmm13,<xmm3=%xmm3
pxor  %xmm13,%xmm3

# qhasm:     xmm6 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm6=int6464#7
# asm 2: pxor  <xmm15=%xmm15,<xmm6=%xmm6
pxor  %xmm15,%xmm6

# qhasm:     xmm7 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm7=int6464#8
# asm 2: pxor  <xmm10=%xmm10,<xmm7=%xmm7
pxor  %xmm10,%xmm7

# qhasm:     xmm4 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm4=int6464#5
# asm 2: pxor  <xmm13=%xmm13,<xmm4=%xmm4
pxor  %xmm13,%xmm4

# qhasm:     xmm8 = shuffle dwords of xmm8 by 0x4E
# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
pshufd $0x4E,%xmm8,%xmm8

# qhasm:     xmm9 = shuffle dwords of xmm9 by 0x4E
# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
pshufd $0x4E,%xmm9,%xmm9

# qhasm:     xmm12 = shuffle dwords of xmm12 by 0x4E
# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
pshufd $0x4E,%xmm12,%xmm12

# qhasm:     xmm14 = shuffle dwords of xmm14 by 0x4E
# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
pshufd $0x4E,%xmm14,%xmm14

# qhasm:     xmm11 = shuffle dwords of xmm11 by 0x4E
# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
pshufd $0x4E,%xmm11,%xmm11

# qhasm:     xmm15 = shuffle dwords of xmm15 by 0x4E
# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
pshufd $0x4E,%xmm15,%xmm15

# qhasm:     xmm10 = shuffle dwords of xmm10 by 0x4E
# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
pshufd $0x4E,%xmm10,%xmm10

# qhasm:     xmm13 = shuffle dwords of xmm13 by 0x4E
# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
pshufd $0x4E,%xmm13,%xmm13

# qhasm:     xmm0 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm8=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

# qhasm:     xmm1 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm1=int6464#2
# asm 2: pxor  <xmm9=%xmm9,<xmm1=%xmm1
pxor  %xmm9,%xmm1

# qhasm:     xmm2 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm2=int6464#3
# asm 2: pxor  <xmm12=%xmm12,<xmm2=%xmm2
pxor  %xmm12,%xmm2

# qhasm:     xmm3 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm3=int6464#4
# asm 2: pxor  <xmm14=%xmm14,<xmm3=%xmm3
pxor  %xmm14,%xmm3

# qhasm:     xmm4 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm4=int6464#5
# asm 2: pxor  <xmm11=%xmm11,<xmm4=%xmm4
pxor  %xmm11,%xmm4

# qhasm:     xmm5 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm5=int6464#6
# asm 2: pxor  <xmm15=%xmm15,<xmm5=%xmm5
pxor  %xmm15,%xmm5

# qhasm:     xmm6 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm6=int6464#7
# asm 2: pxor  <xmm10=%xmm10,<xmm6=%xmm6
pxor  %xmm10,%xmm6

# qhasm:     xmm7 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm7=int6464#8
# asm 2: pxor  <xmm13=%xmm13,<xmm7=%xmm7
pxor  %xmm13,%xmm7

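# note (added annotation, not qhasm output): the next eight stanzas XOR in
# 128 bytes of key material starting at c+1024 (1024 = 8*128, presumably
# the bitsliced round-8 key) and then shuffle bytes by the constant SR,
# which appears to realize AES ShiftRows on the bitsliced state.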
# qhasm:     xmm0 ^= *(int128 *)(c + 1024)
# asm 1: pxor 1024(<c=int64#4),<xmm0=int6464#1
# asm 2: pxor 1024(<c=%rcx),<xmm0=%xmm0
pxor 1024(%rcx),%xmm0

# qhasm:     shuffle bytes of xmm0 by SR
# asm 1: pshufb SR,<xmm0=int6464#1
# asm 2: pshufb SR,<xmm0=%xmm0
pshufb SR,%xmm0

# qhasm:     xmm1 ^= *(int128 *)(c + 1040)
# asm 1: pxor 1040(<c=int64#4),<xmm1=int6464#2
# asm 2: pxor 1040(<c=%rcx),<xmm1=%xmm1
pxor 1040(%rcx),%xmm1

# qhasm:     shuffle bytes of xmm1 by SR
# asm 1: pshufb SR,<xmm1=int6464#2
# asm 2: pshufb SR,<xmm1=%xmm1
pshufb SR,%xmm1

# qhasm:     xmm2 ^= *(int128 *)(c + 1056)
# asm 1: pxor 1056(<c=int64#4),<xmm2=int6464#3
# asm 2: pxor 1056(<c=%rcx),<xmm2=%xmm2
pxor 1056(%rcx),%xmm2

# qhasm:     shuffle bytes of xmm2 by SR
# asm 1: pshufb SR,<xmm2=int6464#3
# asm 2: pshufb SR,<xmm2=%xmm2
pshufb SR,%xmm2

# qhasm:     xmm3 ^= *(int128 *)(c + 1072)
# asm 1: pxor 1072(<c=int64#4),<xmm3=int6464#4
# asm 2: pxor 1072(<c=%rcx),<xmm3=%xmm3
pxor 1072(%rcx),%xmm3

# qhasm:     shuffle bytes of xmm3 by SR
# asm 1: pshufb SR,<xmm3=int6464#4
# asm 2: pshufb SR,<xmm3=%xmm3
pshufb SR,%xmm3

# qhasm:     xmm4 ^= *(int128 *)(c + 1088)
# asm 1: pxor 1088(<c=int64#4),<xmm4=int6464#5
# asm 2: pxor 1088(<c=%rcx),<xmm4=%xmm4
pxor 1088(%rcx),%xmm4

# qhasm:     shuffle bytes of xmm4 by SR
# asm 1: pshufb SR,<xmm4=int6464#5
# asm 2: pshufb SR,<xmm4=%xmm4
pshufb SR,%xmm4

# qhasm:     xmm5 ^= *(int128 *)(c + 1104)
# asm 1: pxor 1104(<c=int64#4),<xmm5=int6464#6
# asm 2: pxor 1104(<c=%rcx),<xmm5=%xmm5
pxor 1104(%rcx),%xmm5

# qhasm:     shuffle bytes of xmm5 by SR
# asm 1: pshufb SR,<xmm5=int6464#6
# asm 2: pshufb SR,<xmm5=%xmm5
pshufb SR,%xmm5

# qhasm:     xmm6 ^= *(int128 *)(c + 1120)
# asm 1: pxor 1120(<c=int64#4),<xmm6=int6464#7
# asm 2: pxor 1120(<c=%rcx),<xmm6=%xmm6
pxor 1120(%rcx),%xmm6

# qhasm:     shuffle bytes of xmm6 by SR
# asm 1: pshufb SR,<xmm6=int6464#7
# asm 2: pshufb SR,<xmm6=%xmm6
pshufb SR,%xmm6

# qhasm:     xmm7 ^= *(int128 *)(c + 1136)
# asm 1: pxor 1136(<c=int64#4),<xmm7=int6464#8
# asm 2: pxor 1136(<c=%rcx),<xmm7=%xmm7
pxor 1136(%rcx),%xmm7

# qhasm:     shuffle bytes of xmm7 by SR
# asm 1: pshufb SR,<xmm7=int6464#8
# asm 2: pshufb SR,<xmm7=%xmm7
pshufb SR,%xmm7

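# note (added annotation, not qhasm output): a fresh S-box evaluation
# starts here.  The XOR-only stanzas below appear to be the top linear
# layer of the S-box circuit, moving the eight slices xmm0..xmm7 into the
# basis expected by the nonlinear core.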
# qhasm:       xmm5 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm5=int6464#6
# asm 2: pxor  <xmm6=%xmm6,<xmm5=%xmm5
pxor  %xmm6,%xmm5

# qhasm:       xmm2 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm2=int6464#3
# asm 2: pxor  <xmm1=%xmm1,<xmm2=%xmm2
pxor  %xmm1,%xmm2

# qhasm:       xmm5 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm5=int6464#6
# asm 2: pxor  <xmm0=%xmm0,<xmm5=%xmm5
pxor  %xmm0,%xmm5

# qhasm:       xmm6 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm6=int6464#7
# asm 2: pxor  <xmm2=%xmm2,<xmm6=%xmm6
pxor  %xmm2,%xmm6

# qhasm:       xmm3 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm3=int6464#4
# asm 2: pxor  <xmm0=%xmm0,<xmm3=%xmm3
pxor  %xmm0,%xmm3

# qhasm:       xmm6 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm6=int6464#7
# asm 2: pxor  <xmm3=%xmm3,<xmm6=%xmm6
pxor  %xmm3,%xmm6

# qhasm:       xmm3 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm3=int6464#4
# asm 2: pxor  <xmm7=%xmm7,<xmm3=%xmm3
pxor  %xmm7,%xmm3

# qhasm:       xmm3 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm3=int6464#4
# asm 2: pxor  <xmm4=%xmm4,<xmm3=%xmm3
pxor  %xmm4,%xmm3

# qhasm:       xmm7 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm7=int6464#8
# asm 2: pxor  <xmm5=%xmm5,<xmm7=%xmm7
pxor  %xmm5,%xmm7

# qhasm:       xmm3 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm3=int6464#4
# asm 2: pxor  <xmm1=%xmm1,<xmm3=%xmm3
pxor  %xmm1,%xmm3

# qhasm:       xmm4 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm4=int6464#5
# asm 2: pxor  <xmm5=%xmm5,<xmm4=%xmm4
pxor  %xmm5,%xmm4

# qhasm:       xmm2 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm2=int6464#3
# asm 2: pxor  <xmm7=%xmm7,<xmm2=%xmm2
pxor  %xmm7,%xmm2

# qhasm:       xmm1 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm1=int6464#2
# asm 2: pxor  <xmm5=%xmm5,<xmm1=%xmm1
pxor  %xmm5,%xmm1

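# note (added annotation, not qhasm output): from here on the circuit
# interleaves AND/OR gates with XORs; this appears to be the nonlinear
# core of the bitsliced S-box (the GF(2^8) inversion), computed for all
# 128 bit positions of each slice in parallel.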
# qhasm:       xmm11 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
movdqa %xmm7,%xmm8

# qhasm:       xmm10 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
movdqa %xmm1,%xmm9

# qhasm:       xmm9 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
movdqa %xmm5,%xmm10

# qhasm:       xmm13 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
movdqa %xmm2,%xmm11

# qhasm:       xmm12 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
movdqa %xmm6,%xmm12

# qhasm:       xmm11 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#9
# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm8
pxor  %xmm4,%xmm8

# qhasm:       xmm10 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm10=int6464#10
# asm 2: pxor  <xmm2=%xmm2,<xmm10=%xmm9
pxor  %xmm2,%xmm9

# qhasm:       xmm9 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm9=int6464#11
# asm 2: pxor  <xmm3=%xmm3,<xmm9=%xmm10
pxor  %xmm3,%xmm10

# qhasm:       xmm13 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm13=int6464#12
# asm 2: pxor  <xmm4=%xmm4,<xmm13=%xmm11
pxor  %xmm4,%xmm11

# qhasm:       xmm12 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#13
# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm12
pxor  %xmm0,%xmm12

# qhasm:       xmm14 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
movdqa %xmm8,%xmm13

# qhasm:       xmm8 = xmm10
# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
movdqa %xmm9,%xmm14

# qhasm:       xmm15 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
movdqa %xmm8,%xmm15

# qhasm:       xmm10 |= xmm9
# asm 1: por   <xmm9=int6464#11,<xmm10=int6464#10
# asm 2: por   <xmm9=%xmm10,<xmm10=%xmm9
por   %xmm10,%xmm9

# qhasm:       xmm11 |= xmm12
# asm 1: por   <xmm12=int6464#13,<xmm11=int6464#9
# asm 2: por   <xmm12=%xmm12,<xmm11=%xmm8
por   %xmm12,%xmm8

# qhasm:       xmm15 ^= xmm8
# asm 1: pxor  <xmm8=int6464#15,<xmm15=int6464#16
# asm 2: pxor  <xmm8=%xmm14,<xmm15=%xmm15
pxor  %xmm14,%xmm15

# qhasm:       xmm14 &= xmm12
# asm 1: pand  <xmm12=int6464#13,<xmm14=int6464#14
# asm 2: pand  <xmm12=%xmm12,<xmm14=%xmm13
pand  %xmm12,%xmm13

# qhasm:       xmm8 &= xmm9
# asm 1: pand  <xmm9=int6464#11,<xmm8=int6464#15
# asm 2: pand  <xmm9=%xmm10,<xmm8=%xmm14
pand  %xmm10,%xmm14

# qhasm:       xmm12 ^= xmm9
# asm 1: pxor  <xmm9=int6464#11,<xmm12=int6464#13
# asm 2: pxor  <xmm9=%xmm10,<xmm12=%xmm12
pxor  %xmm10,%xmm12

# qhasm:       xmm15 &= xmm12
# asm 1: pand  <xmm12=int6464#13,<xmm15=int6464#16
# asm 2: pand  <xmm12=%xmm12,<xmm15=%xmm15
pand  %xmm12,%xmm15

# qhasm:       xmm12 = xmm3
# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
movdqa %xmm3,%xmm10

# qhasm:       xmm12 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#11
# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm10
pxor  %xmm0,%xmm10

# qhasm:       xmm13 &= xmm12
# asm 1: pand  <xmm12=int6464#11,<xmm13=int6464#12
# asm 2: pand  <xmm12=%xmm10,<xmm13=%xmm11
pand  %xmm10,%xmm11

# qhasm:       xmm11 ^= xmm13
# asm 1: pxor  <xmm13=int6464#12,<xmm11=int6464#9
# asm 2: pxor  <xmm13=%xmm11,<xmm11=%xmm8
pxor  %xmm11,%xmm8

# qhasm:       xmm10 ^= xmm13
# asm 1: pxor  <xmm13=int6464#12,<xmm10=int6464#10
# asm 2: pxor  <xmm13=%xmm11,<xmm10=%xmm9
pxor  %xmm11,%xmm9

# qhasm:       xmm13 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
movdqa %xmm7,%xmm10

# qhasm:       xmm13 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm13=int6464#11
# asm 2: pxor  <xmm1=%xmm1,<xmm13=%xmm10
pxor  %xmm1,%xmm10

# qhasm:       xmm12 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
movdqa %xmm5,%xmm11

# qhasm:       xmm9 = xmm13
# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
movdqa %xmm10,%xmm12

# qhasm:       xmm12 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm12=int6464#12
# asm 2: pxor  <xmm6=%xmm6,<xmm12=%xmm11
pxor  %xmm6,%xmm11

# qhasm:       xmm9 |= xmm12
# asm 1: por   <xmm12=int6464#12,<xmm9=int6464#13
# asm 2: por   <xmm12=%xmm11,<xmm9=%xmm12
por   %xmm11,%xmm12

# qhasm:       xmm13 &= xmm12
# asm 1: pand  <xmm12=int6464#12,<xmm13=int6464#11
# asm 2: pand  <xmm12=%xmm11,<xmm13=%xmm10
pand  %xmm11,%xmm10

# qhasm:       xmm8 ^= xmm13
# asm 1: pxor  <xmm13=int6464#11,<xmm8=int6464#15
# asm 2: pxor  <xmm13=%xmm10,<xmm8=%xmm14
pxor  %xmm10,%xmm14

# qhasm:       xmm11 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm11=int6464#9
# asm 2: pxor  <xmm15=%xmm15,<xmm11=%xmm8
pxor  %xmm15,%xmm8

# qhasm:       xmm10 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm10=int6464#10
# asm 2: pxor  <xmm14=%xmm13,<xmm10=%xmm9
pxor  %xmm13,%xmm9

# qhasm:       xmm9 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm9=int6464#13
# asm 2: pxor  <xmm15=%xmm15,<xmm9=%xmm12
pxor  %xmm15,%xmm12

# qhasm:       xmm8 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm8=int6464#15
# asm 2: pxor  <xmm14=%xmm13,<xmm8=%xmm14
pxor  %xmm13,%xmm14

# qhasm:       xmm9 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm9=int6464#13
# asm 2: pxor  <xmm14=%xmm13,<xmm9=%xmm12
pxor  %xmm13,%xmm12

# qhasm:       xmm12 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
movdqa %xmm2,%xmm10

# qhasm:       xmm13 = xmm4
# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
movdqa %xmm4,%xmm11

# qhasm:       xmm14 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
movdqa %xmm1,%xmm13

# qhasm:       xmm15 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
movdqa %xmm7,%xmm15

# qhasm:       xmm12 &= xmm3
# asm 1: pand  <xmm3=int6464#4,<xmm12=int6464#11
# asm 2: pand  <xmm3=%xmm3,<xmm12=%xmm10
pand  %xmm3,%xmm10

# qhasm:       xmm13 &= xmm0
# asm 1: pand  <xmm0=int6464#1,<xmm13=int6464#12
# asm 2: pand  <xmm0=%xmm0,<xmm13=%xmm11
pand  %xmm0,%xmm11

# qhasm:       xmm14 &= xmm5
# asm 1: pand  <xmm5=int6464#6,<xmm14=int6464#14
# asm 2: pand  <xmm5=%xmm5,<xmm14=%xmm13
pand  %xmm5,%xmm13

# qhasm:       xmm15 |= xmm6
# asm 1: por   <xmm6=int6464#7,<xmm15=int6464#16
# asm 2: por   <xmm6=%xmm6,<xmm15=%xmm15
por   %xmm6,%xmm15

# qhasm:       xmm11 ^= xmm12
# asm 1: pxor  <xmm12=int6464#11,<xmm11=int6464#9
# asm 2: pxor  <xmm12=%xmm10,<xmm11=%xmm8
pxor  %xmm10,%xmm8

# qhasm:       xmm10 ^= xmm13
# asm 1: pxor  <xmm13=int6464#12,<xmm10=int6464#10
# asm 2: pxor  <xmm13=%xmm11,<xmm10=%xmm9
pxor  %xmm11,%xmm9

# qhasm:       xmm9 ^= xmm14
# asm 1: pxor  <xmm14=int6464#14,<xmm9=int6464#13
# asm 2: pxor  <xmm14=%xmm13,<xmm9=%xmm12
pxor  %xmm13,%xmm12

# qhasm:       xmm8 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm8=int6464#15
# asm 2: pxor  <xmm15=%xmm15,<xmm8=%xmm14
pxor  %xmm15,%xmm14

# qhasm:       xmm12 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
movdqa %xmm8,%xmm10

# qhasm:       xmm12 ^= xmm10
# asm 1: pxor  <xmm10=int6464#10,<xmm12=int6464#11
# asm 2: pxor  <xmm10=%xmm9,<xmm12=%xmm10
pxor  %xmm9,%xmm10

# qhasm:       xmm11 &= xmm9
# asm 1: pand  <xmm9=int6464#13,<xmm11=int6464#9
# asm 2: pand  <xmm9=%xmm12,<xmm11=%xmm8
pand  %xmm12,%xmm8

# qhasm:       xmm14 = xmm8
# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
movdqa %xmm14,%xmm11

# qhasm:       xmm14 ^= xmm11
# asm 1: pxor  <xmm11=int6464#9,<xmm14=int6464#12
# asm 2: pxor  <xmm11=%xmm8,<xmm14=%xmm11
pxor  %xmm8,%xmm11

# qhasm:       xmm15 = xmm12
# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
movdqa %xmm10,%xmm13

# qhasm:       xmm15 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm15=int6464#14
# asm 2: pand  <xmm14=%xmm11,<xmm15=%xmm13
pand  %xmm11,%xmm13

# qhasm:       xmm15 ^= xmm10
# asm 1: pxor  <xmm10=int6464#10,<xmm15=int6464#14
# asm 2: pxor  <xmm10=%xmm9,<xmm15=%xmm13
pxor  %xmm9,%xmm13

# qhasm:       xmm13 = xmm9
# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
movdqa %xmm12,%xmm15

# qhasm:       xmm13 ^= xmm8
# asm 1: pxor  <xmm8=int6464#15,<xmm13=int6464#16
# asm 2: pxor  <xmm8=%xmm14,<xmm13=%xmm15
pxor  %xmm14,%xmm15

# qhasm:       xmm11 ^= xmm10
# asm 1: pxor  <xmm10=int6464#10,<xmm11=int6464#9
# asm 2: pxor  <xmm10=%xmm9,<xmm11=%xmm8
pxor  %xmm9,%xmm8

# qhasm:       xmm13 &= xmm11
# asm 1: pand  <xmm11=int6464#9,<xmm13=int6464#16
# asm 2: pand  <xmm11=%xmm8,<xmm13=%xmm15
pand  %xmm8,%xmm15

# qhasm:       xmm13 ^= xmm8
# asm 1: pxor  <xmm8=int6464#15,<xmm13=int6464#16
# asm 2: pxor  <xmm8=%xmm14,<xmm13=%xmm15
pxor  %xmm14,%xmm15

# qhasm:       xmm9 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm9=int6464#13
# asm 2: pxor  <xmm13=%xmm15,<xmm9=%xmm12
pxor  %xmm15,%xmm12

# qhasm:       xmm10 = xmm14
# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
movdqa %xmm11,%xmm8

# qhasm:       xmm10 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm10=int6464#9
# asm 2: pxor  <xmm13=%xmm15,<xmm10=%xmm8
pxor  %xmm15,%xmm8

# qhasm:       xmm10 &= xmm8
# asm 1: pand  <xmm8=int6464#15,<xmm10=int6464#9
# asm 2: pand  <xmm8=%xmm14,<xmm10=%xmm8
pand  %xmm14,%xmm8

# qhasm:       xmm9 ^= xmm10
# asm 1: pxor  <xmm10=int6464#9,<xmm9=int6464#13
# asm 2: pxor  <xmm10=%xmm8,<xmm9=%xmm12
pxor  %xmm8,%xmm12

# qhasm:       xmm14 ^= xmm10
# asm 1: pxor  <xmm10=int6464#9,<xmm14=int6464#12
# asm 2: pxor  <xmm10=%xmm8,<xmm14=%xmm11
pxor  %xmm8,%xmm11

# qhasm:       xmm14 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm14=int6464#12
# asm 2: pand  <xmm15=%xmm13,<xmm14=%xmm11
pand  %xmm13,%xmm11

# qhasm:       xmm14 ^= xmm12
# asm 1: pxor  <xmm12=int6464#11,<xmm14=int6464#12
# asm 2: pxor  <xmm12=%xmm10,<xmm14=%xmm11
pxor  %xmm10,%xmm11

# qhasm:         xmm12 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
movdqa %xmm6,%xmm8

# qhasm:         xmm8 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
movdqa %xmm5,%xmm9

# qhasm:           xmm10 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm10 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm10=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm10=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm10 &= xmm6
# asm 1: pand  <xmm6=int6464#7,<xmm10=int6464#11
# asm 2: pand  <xmm6=%xmm6,<xmm10=%xmm10
pand  %xmm6,%xmm10

# qhasm:           xmm6 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm6=int6464#7
# asm 2: pxor  <xmm5=%xmm5,<xmm6=%xmm6
pxor  %xmm5,%xmm6

# qhasm:           xmm6 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm6=int6464#7
# asm 2: pand  <xmm14=%xmm11,<xmm6=%xmm6
pand  %xmm11,%xmm6

# qhasm:           xmm5 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm5=int6464#6
# asm 2: pand  <xmm15=%xmm13,<xmm5=%xmm5
pand  %xmm13,%xmm5

# qhasm:           xmm6 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm6=int6464#7
# asm 2: pxor  <xmm5=%xmm5,<xmm6=%xmm6
pxor  %xmm5,%xmm6

# qhasm:           xmm5 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm5=int6464#6
# asm 2: pxor  <xmm10=%xmm10,<xmm5=%xmm5
pxor  %xmm10,%xmm5

# qhasm:         xmm12 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#9
# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm8
pxor  %xmm0,%xmm8

# qhasm:         xmm8 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm8=int6464#10
# asm 2: pxor  <xmm3=%xmm3,<xmm8=%xmm9
pxor  %xmm3,%xmm9

# qhasm:         xmm15 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm15=int6464#14
# asm 2: pxor  <xmm13=%xmm15,<xmm15=%xmm13
pxor  %xmm15,%xmm13

# qhasm:         xmm14 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm14=int6464#12
# asm 2: pxor  <xmm9=%xmm12,<xmm14=%xmm11
pxor  %xmm12,%xmm11

# qhasm:           xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm11 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm11 &= xmm12
# asm 1: pand  <xmm12=int6464#9,<xmm11=int6464#11
# asm 2: pand  <xmm12=%xmm8,<xmm11=%xmm10
pand  %xmm8,%xmm10

# qhasm:           xmm12 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm12=int6464#9
# asm 2: pxor  <xmm8=%xmm9,<xmm12=%xmm8
pxor  %xmm9,%xmm8

# qhasm:           xmm12 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm12=int6464#9
# asm 2: pand  <xmm14=%xmm11,<xmm12=%xmm8
pand  %xmm11,%xmm8

# qhasm:           xmm8 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm8=int6464#10
# asm 2: pand  <xmm15=%xmm13,<xmm8=%xmm9
pand  %xmm13,%xmm9

# qhasm:           xmm8 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm8=int6464#10
# asm 2: pxor  <xmm12=%xmm8,<xmm8=%xmm9
pxor  %xmm8,%xmm9

# qhasm:           xmm12 ^= xmm11
# asm 1: pxor  <xmm11=int6464#11,<xmm12=int6464#9
# asm 2: pxor  <xmm11=%xmm10,<xmm12=%xmm8
pxor  %xmm10,%xmm8

# qhasm:           xmm10 = xmm13
# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
movdqa %xmm15,%xmm10

# qhasm:           xmm10 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm10=int6464#11
# asm 2: pxor  <xmm9=%xmm12,<xmm10=%xmm10
pxor  %xmm12,%xmm10

# qhasm:           xmm10 &= xmm0
# asm 1: pand  <xmm0=int6464#1,<xmm10=int6464#11
# asm 2: pand  <xmm0=%xmm0,<xmm10=%xmm10
pand  %xmm0,%xmm10

# qhasm:           xmm0 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm0=int6464#1
# asm 2: pxor  <xmm3=%xmm3,<xmm0=%xmm0
pxor  %xmm3,%xmm0

# qhasm:           xmm0 &= xmm9
# asm 1: pand  <xmm9=int6464#13,<xmm0=int6464#1
# asm 2: pand  <xmm9=%xmm12,<xmm0=%xmm0
pand  %xmm12,%xmm0

# qhasm:           xmm3 &= xmm13
# asm 1: pand  <xmm13=int6464#16,<xmm3=int6464#4
# asm 2: pand  <xmm13=%xmm15,<xmm3=%xmm3
pand  %xmm15,%xmm3

# qhasm:           xmm0 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm0=int6464#1
# asm 2: pxor  <xmm3=%xmm3,<xmm0=%xmm0
pxor  %xmm3,%xmm0

# qhasm:           xmm3 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm3=int6464#4
# asm 2: pxor  <xmm10=%xmm10,<xmm3=%xmm3
pxor  %xmm10,%xmm3

# qhasm:         xmm6 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm6=int6464#7
# asm 2: pxor  <xmm12=%xmm8,<xmm6=%xmm6
pxor  %xmm8,%xmm6

# qhasm:         xmm0 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm12=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

# qhasm:         xmm5 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm5=int6464#6
# asm 2: pxor  <xmm8=%xmm9,<xmm5=%xmm5
pxor  %xmm9,%xmm5

# qhasm:         xmm3 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm3=int6464#4
# asm 2: pxor  <xmm8=%xmm9,<xmm3=%xmm3
pxor  %xmm9,%xmm3

# qhasm:         xmm12 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
movdqa %xmm7,%xmm8

# qhasm:         xmm8 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
movdqa %xmm1,%xmm9

# qhasm:         xmm12 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm12=int6464#9
# asm 2: pxor  <xmm4=%xmm4,<xmm12=%xmm8
pxor  %xmm4,%xmm8

# qhasm:         xmm8 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm8=int6464#10
# asm 2: pxor  <xmm2=%xmm2,<xmm8=%xmm9
pxor  %xmm2,%xmm9

# qhasm:           xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm11 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm11 &= xmm12
# asm 1: pand  <xmm12=int6464#9,<xmm11=int6464#11
# asm 2: pand  <xmm12=%xmm8,<xmm11=%xmm10
pand  %xmm8,%xmm10

# qhasm:           xmm12 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm12=int6464#9
# asm 2: pxor  <xmm8=%xmm9,<xmm12=%xmm8
pxor  %xmm9,%xmm8

# qhasm:           xmm12 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm12=int6464#9
# asm 2: pand  <xmm14=%xmm11,<xmm12=%xmm8
pand  %xmm11,%xmm8

# qhasm:           xmm8 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm8=int6464#10
# asm 2: pand  <xmm15=%xmm13,<xmm8=%xmm9
pand  %xmm13,%xmm9

# qhasm:           xmm8 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm8=int6464#10
# asm 2: pxor  <xmm12=%xmm8,<xmm8=%xmm9
pxor  %xmm8,%xmm9

# qhasm:           xmm12 ^= xmm11
# asm 1: pxor  <xmm11=int6464#11,<xmm12=int6464#9
# asm 2: pxor  <xmm11=%xmm10,<xmm12=%xmm8
pxor  %xmm10,%xmm8

# qhasm:           xmm10 = xmm13
# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
movdqa %xmm15,%xmm10

# qhasm:           xmm10 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm10=int6464#11
# asm 2: pxor  <xmm9=%xmm12,<xmm10=%xmm10
pxor  %xmm12,%xmm10

# qhasm:           xmm10 &= xmm4
# asm 1: pand  <xmm4=int6464#5,<xmm10=int6464#11
# asm 2: pand  <xmm4=%xmm4,<xmm10=%xmm10
pand  %xmm4,%xmm10

# qhasm:           xmm4 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm4=int6464#5
# asm 2: pxor  <xmm2=%xmm2,<xmm4=%xmm4
pxor  %xmm2,%xmm4

# qhasm:           xmm4 &= xmm9
# asm 1: pand  <xmm9=int6464#13,<xmm4=int6464#5
# asm 2: pand  <xmm9=%xmm12,<xmm4=%xmm4
pand  %xmm12,%xmm4

# qhasm:           xmm2 &= xmm13
# asm 1: pand  <xmm13=int6464#16,<xmm2=int6464#3
# asm 2: pand  <xmm13=%xmm15,<xmm2=%xmm2
pand  %xmm15,%xmm2

# qhasm:           xmm4 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm4=int6464#5
# asm 2: pxor  <xmm2=%xmm2,<xmm4=%xmm4
pxor  %xmm2,%xmm4

# qhasm:           xmm2 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm2=int6464#3
# asm 2: pxor  <xmm10=%xmm10,<xmm2=%xmm2
pxor  %xmm10,%xmm2

# qhasm:         xmm15 ^= xmm13
# asm 1: pxor  <xmm13=int6464#16,<xmm15=int6464#14
# asm 2: pxor  <xmm13=%xmm15,<xmm15=%xmm13
pxor  %xmm15,%xmm13

# qhasm:         xmm14 ^= xmm9
# asm 1: pxor  <xmm9=int6464#13,<xmm14=int6464#12
# asm 2: pxor  <xmm9=%xmm12,<xmm14=%xmm11
pxor  %xmm12,%xmm11

# qhasm:           xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm:           xmm11 ^= xmm14
# asm 1: pxor  <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor  <xmm14=%xmm11,<xmm11=%xmm10
pxor  %xmm11,%xmm10

# qhasm:           xmm11 &= xmm7
# asm 1: pand  <xmm7=int6464#8,<xmm11=int6464#11
# asm 2: pand  <xmm7=%xmm7,<xmm11=%xmm10
pand  %xmm7,%xmm10

# qhasm:           xmm7 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm7=int6464#8
# asm 2: pxor  <xmm1=%xmm1,<xmm7=%xmm7
pxor  %xmm1,%xmm7

# qhasm:           xmm7 &= xmm14
# asm 1: pand  <xmm14=int6464#12,<xmm7=int6464#8
# asm 2: pand  <xmm14=%xmm11,<xmm7=%xmm7
pand  %xmm11,%xmm7

# qhasm:           xmm1 &= xmm15
# asm 1: pand  <xmm15=int6464#14,<xmm1=int6464#2
# asm 2: pand  <xmm15=%xmm13,<xmm1=%xmm1
pand  %xmm13,%xmm1

# qhasm:           xmm7 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm7=int6464#8
# asm 2: pxor  <xmm1=%xmm1,<xmm7=%xmm7
pxor  %xmm1,%xmm7

# qhasm:           xmm1 ^= xmm11
# asm 1: pxor  <xmm11=int6464#11,<xmm1=int6464#2
# asm 2: pxor  <xmm11=%xmm10,<xmm1=%xmm1
pxor  %xmm10,%xmm1

# qhasm:         xmm7 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm7=int6464#8
# asm 2: pxor  <xmm12=%xmm8,<xmm7=%xmm7
pxor  %xmm8,%xmm7

# qhasm:         xmm4 ^= xmm12
# asm 1: pxor  <xmm12=int6464#9,<xmm4=int6464#5
# asm 2: pxor  <xmm12=%xmm8,<xmm4=%xmm4
pxor  %xmm8,%xmm4

# qhasm:         xmm1 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm1=int6464#2
# asm 2: pxor  <xmm8=%xmm9,<xmm1=%xmm1
pxor  %xmm9,%xmm1

# qhasm:         xmm2 ^= xmm8
# asm 1: pxor  <xmm8=int6464#10,<xmm2=int6464#3
# asm 2: pxor  <xmm8=%xmm9,<xmm2=%xmm2
pxor  %xmm9,%xmm2

# qhasm:       xmm7 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm7=int6464#8
# asm 2: pxor  <xmm0=%xmm0,<xmm7=%xmm7
pxor  %xmm0,%xmm7

# qhasm:       xmm1 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm1=int6464#2
# asm 2: pxor  <xmm6=%xmm6,<xmm1=%xmm1
pxor  %xmm6,%xmm1

# qhasm:       xmm4 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm4=int6464#5
# asm 2: pxor  <xmm7=%xmm7,<xmm4=%xmm4
pxor  %xmm7,%xmm4

# qhasm:       xmm6 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm6=int6464#7
# asm 2: pxor  <xmm0=%xmm0,<xmm6=%xmm6
pxor  %xmm0,%xmm6

# qhasm:       xmm0 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm0=int6464#1
# asm 2: pxor  <xmm1=%xmm1,<xmm0=%xmm0
pxor  %xmm1,%xmm0

# qhasm:       xmm1 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm1=int6464#2
# asm 2: pxor  <xmm5=%xmm5,<xmm1=%xmm1
pxor  %xmm5,%xmm1

# qhasm:       xmm5 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm5=int6464#6
# asm 2: pxor  <xmm2=%xmm2,<xmm5=%xmm5
pxor  %xmm2,%xmm5

# qhasm:       xmm4 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm4=int6464#5
# asm 2: pxor  <xmm5=%xmm5,<xmm4=%xmm4
pxor  %xmm5,%xmm4

# qhasm:       xmm2 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm2=int6464#3
# asm 2: pxor  <xmm3=%xmm3,<xmm2=%xmm2
pxor  %xmm3,%xmm2

# qhasm:       xmm3 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm3=int6464#4
# asm 2: pxor  <xmm5=%xmm5,<xmm3=%xmm3
pxor  %xmm5,%xmm3

# qhasm:       xmm6 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm6=int6464#7
# asm 2: pxor  <xmm3=%xmm3,<xmm6=%xmm6
pxor  %xmm3,%xmm6

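# note (added annotation, not qhasm output): the S-box output mixing above
# is complete; the pshufd $0x93 / $0x4E and pxor stanzas below repeat the
# MixColumns-style linear mixing for this round.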
# qhasm:     xmm8 = shuffle dwords of xmm0 by 0x93
# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
pshufd $0x93,%xmm0,%xmm8

# qhasm:     xmm9 = shuffle dwords of xmm1 by 0x93
# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
pshufd $0x93,%xmm1,%xmm9

# qhasm:     xmm10 = shuffle dwords of xmm4 by 0x93
# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
pshufd $0x93,%xmm4,%xmm10

# qhasm:     xmm11 = shuffle dwords of xmm6 by 0x93
# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
pshufd $0x93,%xmm6,%xmm11

# qhasm:     xmm12 = shuffle dwords of xmm3 by 0x93
# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
pshufd $0x93,%xmm3,%xmm12

# qhasm:     xmm13 = shuffle dwords of xmm7 by 0x93
# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
pshufd $0x93,%xmm7,%xmm13

# qhasm:     xmm14 = shuffle dwords of xmm2 by 0x93
# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
pshufd $0x93,%xmm2,%xmm14

# qhasm:     xmm15 = shuffle dwords of xmm5 by 0x93
# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
pshufd $0x93,%xmm5,%xmm15

# qhasm:     xmm0 ^= xmm8
# asm 1: pxor  <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor  <xmm8=%xmm8,<xmm0=%xmm0
pxor  %xmm8,%xmm0

# qhasm:     xmm1 ^= xmm9
# asm 1: pxor  <xmm9=int6464#10,<xmm1=int6464#2
# asm 2: pxor  <xmm9=%xmm9,<xmm1=%xmm1
pxor  %xmm9,%xmm1

# qhasm:     xmm4 ^= xmm10
# asm 1: pxor  <xmm10=int6464#11,<xmm4=int6464#5
# asm 2: pxor  <xmm10=%xmm10,<xmm4=%xmm4
pxor  %xmm10,%xmm4

# qhasm:     xmm6 ^= xmm11
# asm 1: pxor  <xmm11=int6464#12,<xmm6=int6464#7
# asm 2: pxor  <xmm11=%xmm11,<xmm6=%xmm6
pxor  %xmm11,%xmm6

# qhasm:     xmm3 ^= xmm12
# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#4
# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm3
pxor  %xmm12,%xmm3

# qhasm:     xmm7 ^= xmm13
# asm 1: pxor  <xmm13=int6464#14,<xmm7=int6464#8
# asm 2: pxor  <xmm13=%xmm13,<xmm7=%xmm7
pxor  %xmm13,%xmm7

# qhasm:     xmm2 ^= xmm14
# asm 1: pxor  <xmm14=int6464#15,<xmm2=int6464#3
# asm 2: pxor  <xmm14=%xmm14,<xmm2=%xmm2
pxor  %xmm14,%xmm2

# qhasm:     xmm5 ^= xmm15
# asm 1: pxor  <xmm15=int6464#16,<xmm5=int6464#6
# asm 2: pxor  <xmm15=%xmm15,<xmm5=%xmm5
pxor  %xmm15,%xmm5

# qhasm:     xmm8 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm8=int6464#9
# asm 2: pxor  <xmm5=%xmm5,<xmm8=%xmm8
pxor  %xmm5,%xmm8

# qhasm:     xmm9 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm9=int6464#10
# asm 2: pxor  <xmm0=%xmm0,<xmm9=%xmm9
pxor  %xmm0,%xmm9

# qhasm:     xmm10 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm10=int6464#11
# asm 2: pxor  <xmm1=%xmm1,<xmm10=%xmm10
pxor  %xmm1,%xmm10

# qhasm:     xmm9 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm9=int6464#10
# asm 2: pxor  <xmm5=%xmm5,<xmm9=%xmm9
pxor  %xmm5,%xmm9

# qhasm:     xmm11 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm11=int6464#12
# asm 2: pxor  <xmm4=%xmm4,<xmm11=%xmm11
pxor  %xmm4,%xmm11

# qhasm:     xmm12 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm12=int6464#13
# asm 2: pxor  <xmm6=%xmm6,<xmm12=%xmm12
pxor  %xmm6,%xmm12

# qhasm:     xmm13 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm13=int6464#14
# asm 2: pxor  <xmm3=%xmm3,<xmm13=%xmm13
pxor  %xmm3,%xmm13

# qhasm:     xmm11 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm11=int6464#12
# asm 2: pxor  <xmm5=%xmm5,<xmm11=%xmm11
pxor  %xmm5,%xmm11

# qhasm:     xmm14 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm14=int6464#15
# asm 2: pxor  <xmm7=%xmm7,<xmm14=%xmm14
pxor  %xmm7,%xmm14

# qhasm:     xmm15 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm15=int6464#16
# asm 2: pxor  <xmm2=%xmm2,<xmm15=%xmm15
pxor  %xmm2,%xmm15

# qhasm:     xmm12 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm12=int6464#13
# asm 2: pxor  <xmm5=%xmm5,<xmm12=%xmm12
pxor  %xmm5,%xmm12

# qhasm:     xmm0 = shuffle dwords of xmm0 by 0x4E
# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
pshufd $0x4E,%xmm0,%xmm0

# qhasm:     xmm1 = shuffle dwords of xmm1 by 0x4E
# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
pshufd $0x4E,%xmm1,%xmm1

# qhasm:     xmm4 = shuffle dwords of xmm4 by 0x4E
# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
pshufd $0x4E,%xmm4,%xmm4

# qhasm:     xmm6 = shuffle dwords of xmm6 by 0x4E
# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
pshufd $0x4E,%xmm6,%xmm6

# qhasm:     xmm3 = shuffle dwords of xmm3 by 0x4E
# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
pshufd $0x4E,%xmm3,%xmm3

# qhasm:     xmm7 = shuffle dwords of xmm7 by 0x4E
# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
pshufd $0x4E,%xmm7,%xmm7

# qhasm:     xmm2 = shuffle dwords of xmm2 by 0x4E
# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
pshufd $0x4E,%xmm2,%xmm2

# qhasm:     xmm5 = shuffle dwords of xmm5 by 0x4E
# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
pshufd $0x4E,%xmm5,%xmm5

# qhasm:     xmm8 ^= xmm0
# asm 1: pxor  <xmm0=int6464#1,<xmm8=int6464#9
# asm 2: pxor  <xmm0=%xmm0,<xmm8=%xmm8
pxor  %xmm0,%xmm8

# qhasm:     xmm9 ^= xmm1
# asm 1: pxor  <xmm1=int6464#2,<xmm9=int6464#10
# asm 2: pxor  <xmm1=%xmm1,<xmm9=%xmm9
pxor  %xmm1,%xmm9

# qhasm:     xmm10 ^= xmm4
# asm 1: pxor  <xmm4=int6464#5,<xmm10=int6464#11
# asm 2: pxor  <xmm4=%xmm4,<xmm10=%xmm10
pxor  %xmm4,%xmm10

# qhasm:     xmm11 ^= xmm6
# asm 1: pxor  <xmm6=int6464#7,<xmm11=int6464#12
# asm 2: pxor  <xmm6=%xmm6,<xmm11=%xmm11
pxor  %xmm6,%xmm11

# qhasm:     xmm12 ^= xmm3
# asm 1: pxor  <xmm3=int6464#4,<xmm12=int6464#13
# asm 2: pxor  <xmm3=%xmm3,<xmm12=%xmm12
pxor  %xmm3,%xmm12

# qhasm:     xmm13 ^= xmm7
# asm 1: pxor  <xmm7=int6464#8,<xmm13=int6464#14
# asm 2: pxor  <xmm7=%xmm7,<xmm13=%xmm13
pxor  %xmm7,%xmm13

# qhasm:     xmm14 ^= xmm2
# asm 1: pxor  <xmm2=int6464#3,<xmm14=int6464#15
# asm 2: pxor  <xmm2=%xmm2,<xmm14=%xmm14
pxor  %xmm2,%xmm14

# qhasm:     xmm15 ^= xmm5
# asm 1: pxor  <xmm5=int6464#6,<xmm15=int6464#16
# asm 2: pxor  <xmm5=%xmm5,<xmm15=%xmm15
pxor  %xmm5,%xmm15

10660# qhasm:     xmm8 ^= *(int128 *)(c + 1152)
10661# asm 1: pxor 1152(<c=int64#4),<xmm8=int6464#9
10662# asm 2: pxor 1152(<c=%rcx),<xmm8=%xmm8
10663pxor 1152(%rcx),%xmm8
10664
10665# qhasm:     shuffle bytes of xmm8 by SRM0
10666# asm 1: pshufb SRM0,<xmm8=int6464#9
10667# asm 2: pshufb SRM0,<xmm8=%xmm8
10668pshufb SRM0,%xmm8
10669
10670# qhasm:     xmm9 ^= *(int128 *)(c + 1168)
10671# asm 1: pxor 1168(<c=int64#4),<xmm9=int6464#10
10672# asm 2: pxor 1168(<c=%rcx),<xmm9=%xmm9
10673pxor 1168(%rcx),%xmm9
10674
10675# qhasm:     shuffle bytes of xmm9 by SRM0
10676# asm 1: pshufb SRM0,<xmm9=int6464#10
10677# asm 2: pshufb SRM0,<xmm9=%xmm9
10678pshufb SRM0,%xmm9
10679
10680# qhasm:     xmm10 ^= *(int128 *)(c + 1184)
10681# asm 1: pxor 1184(<c=int64#4),<xmm10=int6464#11
10682# asm 2: pxor 1184(<c=%rcx),<xmm10=%xmm10
10683pxor 1184(%rcx),%xmm10
10684
10685# qhasm:     shuffle bytes of xmm10 by SRM0
10686# asm 1: pshufb SRM0,<xmm10=int6464#11
10687# asm 2: pshufb SRM0,<xmm10=%xmm10
10688pshufb SRM0,%xmm10
10689
10690# qhasm:     xmm11 ^= *(int128 *)(c + 1200)
10691# asm 1: pxor 1200(<c=int64#4),<xmm11=int6464#12
10692# asm 2: pxor 1200(<c=%rcx),<xmm11=%xmm11
10693pxor 1200(%rcx),%xmm11
10694
10695# qhasm:     shuffle bytes of xmm11 by SRM0
10696# asm 1: pshufb SRM0,<xmm11=int6464#12
10697# asm 2: pshufb SRM0,<xmm11=%xmm11
10698pshufb SRM0,%xmm11
10699
10700# qhasm:     xmm12 ^= *(int128 *)(c + 1216)
10701# asm 1: pxor 1216(<c=int64#4),<xmm12=int6464#13
10702# asm 2: pxor 1216(<c=%rcx),<xmm12=%xmm12
10703pxor 1216(%rcx),%xmm12
10704
10705# qhasm:     shuffle bytes of xmm12 by SRM0
10706# asm 1: pshufb SRM0,<xmm12=int6464#13
10707# asm 2: pshufb SRM0,<xmm12=%xmm12
10708pshufb SRM0,%xmm12
10709
10710# qhasm:     xmm13 ^= *(int128 *)(c + 1232)
10711# asm 1: pxor 1232(<c=int64#4),<xmm13=int6464#14
10712# asm 2: pxor 1232(<c=%rcx),<xmm13=%xmm13
10713pxor 1232(%rcx),%xmm13
10714
10715# qhasm:     shuffle bytes of xmm13 by SRM0
10716# asm 1: pshufb SRM0,<xmm13=int6464#14
10717# asm 2: pshufb SRM0,<xmm13=%xmm13
10718pshufb SRM0,%xmm13
10719
10720# qhasm:     xmm14 ^= *(int128 *)(c + 1248)
10721# asm 1: pxor 1248(<c=int64#4),<xmm14=int6464#15
10722# asm 2: pxor 1248(<c=%rcx),<xmm14=%xmm14
10723pxor 1248(%rcx),%xmm14
10724
10725# qhasm:     shuffle bytes of xmm14 by SRM0
10726# asm 1: pshufb SRM0,<xmm14=int6464#15
10727# asm 2: pshufb SRM0,<xmm14=%xmm14
10728pshufb SRM0,%xmm14
10729
10730# qhasm:     xmm15 ^= *(int128 *)(c + 1264)
10731# asm 1: pxor 1264(<c=int64#4),<xmm15=int6464#16
10732# asm 2: pxor 1264(<c=%rcx),<xmm15=%xmm15
10733pxor 1264(%rcx),%xmm15
10734
10735# qhasm:     shuffle bytes of xmm15 by SRM0
10736# asm 1: pshufb SRM0,<xmm15=int6464#16
10737# asm 2: pshufb SRM0,<xmm15=%xmm15
10738pshufb SRM0,%xmm15
10739
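# The long pxor/pand/por network that follows evaluates the AES S-box as a
# boolean circuit on bitsliced data: each of the eight live registers holds
# one bit position of all 128 state bytes, so every logic instruction applies
# one gate of the circuit to 128 S-box inputs at once. A minimal C sketch of
# the idea, assuming 64-bit lanes (the real code uses 128-bit XMM lanes, but
# the principle is identical):
#
#     /* x9, x10, x13, x14: one bit position each of 64 parallel bytes */
#     x13 ^= x14;   /* one XOR gate over 64 inputs: pxor %xmm14,%xmm13 */
#     x10 ^= x9;    /*                              pxor %xmm9,%xmm10  */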
10740# qhasm:       xmm13 ^= xmm14
10741# asm 1: pxor  <xmm14=int6464#15,<xmm13=int6464#14
10742# asm 2: pxor  <xmm14=%xmm14,<xmm13=%xmm13
10743pxor  %xmm14,%xmm13
10744
10745# qhasm:       xmm10 ^= xmm9
10746# asm 1: pxor  <xmm9=int6464#10,<xmm10=int6464#11
10747# asm 2: pxor  <xmm9=%xmm9,<xmm10=%xmm10
10748pxor  %xmm9,%xmm10
10749
10750# qhasm:       xmm13 ^= xmm8
10751# asm 1: pxor  <xmm8=int6464#9,<xmm13=int6464#14
10752# asm 2: pxor  <xmm8=%xmm8,<xmm13=%xmm13
10753pxor  %xmm8,%xmm13
10754
10755# qhasm:       xmm14 ^= xmm10
10756# asm 1: pxor  <xmm10=int6464#11,<xmm14=int6464#15
10757# asm 2: pxor  <xmm10=%xmm10,<xmm14=%xmm14
10758pxor  %xmm10,%xmm14
10759
10760# qhasm:       xmm11 ^= xmm8
10761# asm 1: pxor  <xmm8=int6464#9,<xmm11=int6464#12
10762# asm 2: pxor  <xmm8=%xmm8,<xmm11=%xmm11
10763pxor  %xmm8,%xmm11
10764
10765# qhasm:       xmm14 ^= xmm11
10766# asm 1: pxor  <xmm11=int6464#12,<xmm14=int6464#15
10767# asm 2: pxor  <xmm11=%xmm11,<xmm14=%xmm14
10768pxor  %xmm11,%xmm14
10769
10770# qhasm:       xmm11 ^= xmm15
10771# asm 1: pxor  <xmm15=int6464#16,<xmm11=int6464#12
10772# asm 2: pxor  <xmm15=%xmm15,<xmm11=%xmm11
10773pxor  %xmm15,%xmm11
10774
10775# qhasm:       xmm11 ^= xmm12
10776# asm 1: pxor  <xmm12=int6464#13,<xmm11=int6464#12
10777# asm 2: pxor  <xmm12=%xmm12,<xmm11=%xmm11
10778pxor  %xmm12,%xmm11
10779
10780# qhasm:       xmm15 ^= xmm13
10781# asm 1: pxor  <xmm13=int6464#14,<xmm15=int6464#16
10782# asm 2: pxor  <xmm13=%xmm13,<xmm15=%xmm15
10783pxor  %xmm13,%xmm15
10784
10785# qhasm:       xmm11 ^= xmm9
10786# asm 1: pxor  <xmm9=int6464#10,<xmm11=int6464#12
10787# asm 2: pxor  <xmm9=%xmm9,<xmm11=%xmm11
10788pxor  %xmm9,%xmm11
10789
10790# qhasm:       xmm12 ^= xmm13
10791# asm 1: pxor  <xmm13=int6464#14,<xmm12=int6464#13
10792# asm 2: pxor  <xmm13=%xmm13,<xmm12=%xmm12
10793pxor  %xmm13,%xmm12
10794
10795# qhasm:       xmm10 ^= xmm15
10796# asm 1: pxor  <xmm15=int6464#16,<xmm10=int6464#11
10797# asm 2: pxor  <xmm15=%xmm15,<xmm10=%xmm10
10798pxor  %xmm15,%xmm10
10799
10800# qhasm:       xmm9 ^= xmm13
10801# asm 1: pxor  <xmm13=int6464#14,<xmm9=int6464#10
10802# asm 2: pxor  <xmm13=%xmm13,<xmm9=%xmm9
10803pxor  %xmm13,%xmm9
10804
10805# qhasm:       xmm3 = xmm15
10806# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
10807# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
10808movdqa %xmm15,%xmm0
10809
10810# qhasm:       xmm2 = xmm9
10811# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
10812# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
10813movdqa %xmm9,%xmm1
10814
10815# qhasm:       xmm1 = xmm13
10816# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
10817# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
10818movdqa %xmm13,%xmm2
10819
10820# qhasm:       xmm5 = xmm10
10821# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
10822# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
10823movdqa %xmm10,%xmm3
10824
10825# qhasm:       xmm4 = xmm14
10826# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
10827# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
10828movdqa %xmm14,%xmm4
10829
10830# qhasm:       xmm3 ^= xmm12
10831# asm 1: pxor  <xmm12=int6464#13,<xmm3=int6464#1
10832# asm 2: pxor  <xmm12=%xmm12,<xmm3=%xmm0
10833pxor  %xmm12,%xmm0
10834
10835# qhasm:       xmm2 ^= xmm10
10836# asm 1: pxor  <xmm10=int6464#11,<xmm2=int6464#2
10837# asm 2: pxor  <xmm10=%xmm10,<xmm2=%xmm1
10838pxor  %xmm10,%xmm1
10839
10840# qhasm:       xmm1 ^= xmm11
10841# asm 1: pxor  <xmm11=int6464#12,<xmm1=int6464#3
10842# asm 2: pxor  <xmm11=%xmm11,<xmm1=%xmm2
10843pxor  %xmm11,%xmm2
10844
10845# qhasm:       xmm5 ^= xmm12
10846# asm 1: pxor  <xmm12=int6464#13,<xmm5=int6464#4
10847# asm 2: pxor  <xmm12=%xmm12,<xmm5=%xmm3
10848pxor  %xmm12,%xmm3
10849
10850# qhasm:       xmm4 ^= xmm8
10851# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#5
10852# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm4
10853pxor  %xmm8,%xmm4
10854
10855# qhasm:       xmm6 = xmm3
10856# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
10857# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
10858movdqa %xmm0,%xmm5
10859
10860# qhasm:       xmm0 = xmm2
10861# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
10862# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
10863movdqa %xmm1,%xmm6
10864
10865# qhasm:       xmm7 = xmm3
10866# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
10867# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
10868movdqa %xmm0,%xmm7
10869
10870# qhasm:       xmm2 |= xmm1
10871# asm 1: por   <xmm1=int6464#3,<xmm2=int6464#2
10872# asm 2: por   <xmm1=%xmm2,<xmm2=%xmm1
10873por   %xmm2,%xmm1
10874
10875# qhasm:       xmm3 |= xmm4
10876# asm 1: por   <xmm4=int6464#5,<xmm3=int6464#1
10877# asm 2: por   <xmm4=%xmm4,<xmm3=%xmm0
10878por   %xmm4,%xmm0
10879
10880# qhasm:       xmm7 ^= xmm0
10881# asm 1: pxor  <xmm0=int6464#7,<xmm7=int6464#8
10882# asm 2: pxor  <xmm0=%xmm6,<xmm7=%xmm7
10883pxor  %xmm6,%xmm7
10884
10885# qhasm:       xmm6 &= xmm4
10886# asm 1: pand  <xmm4=int6464#5,<xmm6=int6464#6
10887# asm 2: pand  <xmm4=%xmm4,<xmm6=%xmm5
10888pand  %xmm4,%xmm5
10889
10890# qhasm:       xmm0 &= xmm1
10891# asm 1: pand  <xmm1=int6464#3,<xmm0=int6464#7
10892# asm 2: pand  <xmm1=%xmm2,<xmm0=%xmm6
10893pand  %xmm2,%xmm6
10894
10895# qhasm:       xmm4 ^= xmm1
10896# asm 1: pxor  <xmm1=int6464#3,<xmm4=int6464#5
10897# asm 2: pxor  <xmm1=%xmm2,<xmm4=%xmm4
10898pxor  %xmm2,%xmm4
10899
10900# qhasm:       xmm7 &= xmm4
10901# asm 1: pand  <xmm4=int6464#5,<xmm7=int6464#8
10902# asm 2: pand  <xmm4=%xmm4,<xmm7=%xmm7
10903pand  %xmm4,%xmm7
10904
10905# qhasm:       xmm4 = xmm11
10906# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
10907# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
10908movdqa %xmm11,%xmm2
10909
10910# qhasm:       xmm4 ^= xmm8
10911# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#3
10912# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm2
10913pxor  %xmm8,%xmm2
10914
10915# qhasm:       xmm5 &= xmm4
10916# asm 1: pand  <xmm4=int6464#3,<xmm5=int6464#4
10917# asm 2: pand  <xmm4=%xmm2,<xmm5=%xmm3
10918pand  %xmm2,%xmm3
10919
10920# qhasm:       xmm3 ^= xmm5
10921# asm 1: pxor  <xmm5=int6464#4,<xmm3=int6464#1
10922# asm 2: pxor  <xmm5=%xmm3,<xmm3=%xmm0
10923pxor  %xmm3,%xmm0
10924
10925# qhasm:       xmm2 ^= xmm5
10926# asm 1: pxor  <xmm5=int6464#4,<xmm2=int6464#2
10927# asm 2: pxor  <xmm5=%xmm3,<xmm2=%xmm1
10928pxor  %xmm3,%xmm1
10929
10930# qhasm:       xmm5 = xmm15
10931# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
10932# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
10933movdqa %xmm15,%xmm2
10934
10935# qhasm:       xmm5 ^= xmm9
10936# asm 1: pxor  <xmm9=int6464#10,<xmm5=int6464#3
10937# asm 2: pxor  <xmm9=%xmm9,<xmm5=%xmm2
10938pxor  %xmm9,%xmm2
10939
10940# qhasm:       xmm4 = xmm13
10941# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
10942# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
10943movdqa %xmm13,%xmm3
10944
10945# qhasm:       xmm1 = xmm5
10946# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
10947# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
10948movdqa %xmm2,%xmm4
10949
10950# qhasm:       xmm4 ^= xmm14
10951# asm 1: pxor  <xmm14=int6464#15,<xmm4=int6464#4
10952# asm 2: pxor  <xmm14=%xmm14,<xmm4=%xmm3
10953pxor  %xmm14,%xmm3
10954
10955# qhasm:       xmm1 |= xmm4
10956# asm 1: por   <xmm4=int6464#4,<xmm1=int6464#5
10957# asm 2: por   <xmm4=%xmm3,<xmm1=%xmm4
10958por   %xmm3,%xmm4
10959
10960# qhasm:       xmm5 &= xmm4
10961# asm 1: pand  <xmm4=int6464#4,<xmm5=int6464#3
10962# asm 2: pand  <xmm4=%xmm3,<xmm5=%xmm2
10963pand  %xmm3,%xmm2
10964
10965# qhasm:       xmm0 ^= xmm5
10966# asm 1: pxor  <xmm5=int6464#3,<xmm0=int6464#7
10967# asm 2: pxor  <xmm5=%xmm2,<xmm0=%xmm6
10968pxor  %xmm2,%xmm6
10969
10970# qhasm:       xmm3 ^= xmm7
10971# asm 1: pxor  <xmm7=int6464#8,<xmm3=int6464#1
10972# asm 2: pxor  <xmm7=%xmm7,<xmm3=%xmm0
10973pxor  %xmm7,%xmm0
10974
10975# qhasm:       xmm2 ^= xmm6
10976# asm 1: pxor  <xmm6=int6464#6,<xmm2=int6464#2
10977# asm 2: pxor  <xmm6=%xmm5,<xmm2=%xmm1
10978pxor  %xmm5,%xmm1
10979
10980# qhasm:       xmm1 ^= xmm7
10981# asm 1: pxor  <xmm7=int6464#8,<xmm1=int6464#5
10982# asm 2: pxor  <xmm7=%xmm7,<xmm1=%xmm4
10983pxor  %xmm7,%xmm4
10984
10985# qhasm:       xmm0 ^= xmm6
10986# asm 1: pxor  <xmm6=int6464#6,<xmm0=int6464#7
10987# asm 2: pxor  <xmm6=%xmm5,<xmm0=%xmm6
10988pxor  %xmm5,%xmm6
10989
10990# qhasm:       xmm1 ^= xmm6
10991# asm 1: pxor  <xmm6=int6464#6,<xmm1=int6464#5
10992# asm 2: pxor  <xmm6=%xmm5,<xmm1=%xmm4
10993pxor  %xmm5,%xmm4
10994
10995# qhasm:       xmm4 = xmm10
10996# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
10997# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
10998movdqa %xmm10,%xmm2
10999
11000# qhasm:       xmm5 = xmm12
11001# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
11002# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
11003movdqa %xmm12,%xmm3
11004
11005# qhasm:       xmm6 = xmm9
11006# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
11007# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
11008movdqa %xmm9,%xmm5
11009
11010# qhasm:       xmm7 = xmm15
11011# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
11012# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
11013movdqa %xmm15,%xmm7
11014
11015# qhasm:       xmm4 &= xmm11
11016# asm 1: pand  <xmm11=int6464#12,<xmm4=int6464#3
11017# asm 2: pand  <xmm11=%xmm11,<xmm4=%xmm2
11018pand  %xmm11,%xmm2
11019
11020# qhasm:       xmm5 &= xmm8
11021# asm 1: pand  <xmm8=int6464#9,<xmm5=int6464#4
11022# asm 2: pand  <xmm8=%xmm8,<xmm5=%xmm3
11023pand  %xmm8,%xmm3
11024
11025# qhasm:       xmm6 &= xmm13
11026# asm 1: pand  <xmm13=int6464#14,<xmm6=int6464#6
11027# asm 2: pand  <xmm13=%xmm13,<xmm6=%xmm5
11028pand  %xmm13,%xmm5
11029
11030# qhasm:       xmm7 |= xmm14
11031# asm 1: por   <xmm14=int6464#15,<xmm7=int6464#8
11032# asm 2: por   <xmm14=%xmm14,<xmm7=%xmm7
11033por   %xmm14,%xmm7
11034
11035# qhasm:       xmm3 ^= xmm4
11036# asm 1: pxor  <xmm4=int6464#3,<xmm3=int6464#1
11037# asm 2: pxor  <xmm4=%xmm2,<xmm3=%xmm0
11038pxor  %xmm2,%xmm0
11039
11040# qhasm:       xmm2 ^= xmm5
11041# asm 1: pxor  <xmm5=int6464#4,<xmm2=int6464#2
11042# asm 2: pxor  <xmm5=%xmm3,<xmm2=%xmm1
11043pxor  %xmm3,%xmm1
11044
11045# qhasm:       xmm1 ^= xmm6
11046# asm 1: pxor  <xmm6=int6464#6,<xmm1=int6464#5
11047# asm 2: pxor  <xmm6=%xmm5,<xmm1=%xmm4
11048pxor  %xmm5,%xmm4
11049
11050# qhasm:       xmm0 ^= xmm7
11051# asm 1: pxor  <xmm7=int6464#8,<xmm0=int6464#7
11052# asm 2: pxor  <xmm7=%xmm7,<xmm0=%xmm6
11053pxor  %xmm7,%xmm6
11054
11055# qhasm:       xmm4 = xmm3
11056# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
11057# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
11058movdqa %xmm0,%xmm2
11059
11060# qhasm:       xmm4 ^= xmm2
11061# asm 1: pxor  <xmm2=int6464#2,<xmm4=int6464#3
11062# asm 2: pxor  <xmm2=%xmm1,<xmm4=%xmm2
11063pxor  %xmm1,%xmm2
11064
11065# qhasm:       xmm3 &= xmm1
11066# asm 1: pand  <xmm1=int6464#5,<xmm3=int6464#1
11067# asm 2: pand  <xmm1=%xmm4,<xmm3=%xmm0
11068pand  %xmm4,%xmm0
11069
11070# qhasm:       xmm6 = xmm0
11071# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
11072# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
11073movdqa %xmm6,%xmm3
11074
11075# qhasm:       xmm6 ^= xmm3
11076# asm 1: pxor  <xmm3=int6464#1,<xmm6=int6464#4
11077# asm 2: pxor  <xmm3=%xmm0,<xmm6=%xmm3
11078pxor  %xmm0,%xmm3
11079
11080# qhasm:       xmm7 = xmm4
11081# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
11082# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
11083movdqa %xmm2,%xmm5
11084
11085# qhasm:       xmm7 &= xmm6
11086# asm 1: pand  <xmm6=int6464#4,<xmm7=int6464#6
11087# asm 2: pand  <xmm6=%xmm3,<xmm7=%xmm5
11088pand  %xmm3,%xmm5
11089
11090# qhasm:       xmm7 ^= xmm2
11091# asm 1: pxor  <xmm2=int6464#2,<xmm7=int6464#6
11092# asm 2: pxor  <xmm2=%xmm1,<xmm7=%xmm5
11093pxor  %xmm1,%xmm5
11094
11095# qhasm:       xmm5 = xmm1
11096# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
11097# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
11098movdqa %xmm4,%xmm7
11099
11100# qhasm:       xmm5 ^= xmm0
11101# asm 1: pxor  <xmm0=int6464#7,<xmm5=int6464#8
11102# asm 2: pxor  <xmm0=%xmm6,<xmm5=%xmm7
11103pxor  %xmm6,%xmm7
11104
11105# qhasm:       xmm3 ^= xmm2
11106# asm 1: pxor  <xmm2=int6464#2,<xmm3=int6464#1
11107# asm 2: pxor  <xmm2=%xmm1,<xmm3=%xmm0
11108pxor  %xmm1,%xmm0
11109
11110# qhasm:       xmm5 &= xmm3
11111# asm 1: pand  <xmm3=int6464#1,<xmm5=int6464#8
11112# asm 2: pand  <xmm3=%xmm0,<xmm5=%xmm7
11113pand  %xmm0,%xmm7
11114
11115# qhasm:       xmm5 ^= xmm0
11116# asm 1: pxor  <xmm0=int6464#7,<xmm5=int6464#8
11117# asm 2: pxor  <xmm0=%xmm6,<xmm5=%xmm7
11118pxor  %xmm6,%xmm7
11119
11120# qhasm:       xmm1 ^= xmm5
11121# asm 1: pxor  <xmm5=int6464#8,<xmm1=int6464#5
11122# asm 2: pxor  <xmm5=%xmm7,<xmm1=%xmm4
11123pxor  %xmm7,%xmm4
11124
11125# qhasm:       xmm2 = xmm6
11126# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
11127# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
11128movdqa %xmm3,%xmm0
11129
11130# qhasm:       xmm2 ^= xmm5
11131# asm 1: pxor  <xmm5=int6464#8,<xmm2=int6464#1
11132# asm 2: pxor  <xmm5=%xmm7,<xmm2=%xmm0
11133pxor  %xmm7,%xmm0
11134
11135# qhasm:       xmm2 &= xmm0
11136# asm 1: pand  <xmm0=int6464#7,<xmm2=int6464#1
11137# asm 2: pand  <xmm0=%xmm6,<xmm2=%xmm0
11138pand  %xmm6,%xmm0
11139
11140# qhasm:       xmm1 ^= xmm2
11141# asm 1: pxor  <xmm2=int6464#1,<xmm1=int6464#5
11142# asm 2: pxor  <xmm2=%xmm0,<xmm1=%xmm4
11143pxor  %xmm0,%xmm4
11144
11145# qhasm:       xmm6 ^= xmm2
11146# asm 1: pxor  <xmm2=int6464#1,<xmm6=int6464#4
11147# asm 2: pxor  <xmm2=%xmm0,<xmm6=%xmm3
11148pxor  %xmm0,%xmm3
11149
11150# qhasm:       xmm6 &= xmm7
11151# asm 1: pand  <xmm7=int6464#6,<xmm6=int6464#4
11152# asm 2: pand  <xmm7=%xmm5,<xmm6=%xmm3
11153pand  %xmm5,%xmm3
11154
11155# qhasm:       xmm6 ^= xmm4
11156# asm 1: pxor  <xmm4=int6464#3,<xmm6=int6464#4
11157# asm 2: pxor  <xmm4=%xmm2,<xmm6=%xmm3
11158pxor  %xmm2,%xmm3
11159
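# The growing indentation in the qhasm comments tracks the nesting of the
# S-box subcircuits: the deepest blocks appear to compute the shared
# nonlinear core (the GF(2^8) inversion), while the shallower ones are the
# linear input and output layers.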
11160# qhasm:         xmm4 = xmm14
11161# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
11162# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
11163movdqa %xmm14,%xmm0
11164
11165# qhasm:         xmm0 = xmm13
11166# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
11167# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
11168movdqa %xmm13,%xmm1
11169
11170# qhasm:           xmm2 = xmm7
11171# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
11172# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
11173movdqa %xmm5,%xmm2
11174
11175# qhasm:           xmm2 ^= xmm6
11176# asm 1: pxor  <xmm6=int6464#4,<xmm2=int6464#3
11177# asm 2: pxor  <xmm6=%xmm3,<xmm2=%xmm2
11178pxor  %xmm3,%xmm2
11179
11180# qhasm:           xmm2 &= xmm14
11181# asm 1: pand  <xmm14=int6464#15,<xmm2=int6464#3
11182# asm 2: pand  <xmm14=%xmm14,<xmm2=%xmm2
11183pand  %xmm14,%xmm2
11184
11185# qhasm:           xmm14 ^= xmm13
11186# asm 1: pxor  <xmm13=int6464#14,<xmm14=int6464#15
11187# asm 2: pxor  <xmm13=%xmm13,<xmm14=%xmm14
11188pxor  %xmm13,%xmm14
11189
11190# qhasm:           xmm14 &= xmm6
11191# asm 1: pand  <xmm6=int6464#4,<xmm14=int6464#15
11192# asm 2: pand  <xmm6=%xmm3,<xmm14=%xmm14
11193pand  %xmm3,%xmm14
11194
11195# qhasm:           xmm13 &= xmm7
11196# asm 1: pand  <xmm7=int6464#6,<xmm13=int6464#14
11197# asm 2: pand  <xmm7=%xmm5,<xmm13=%xmm13
11198pand  %xmm5,%xmm13
11199
11200# qhasm:           xmm14 ^= xmm13
11201# asm 1: pxor  <xmm13=int6464#14,<xmm14=int6464#15
11202# asm 2: pxor  <xmm13=%xmm13,<xmm14=%xmm14
11203pxor  %xmm13,%xmm14
11204
11205# qhasm:           xmm13 ^= xmm2
11206# asm 1: pxor  <xmm2=int6464#3,<xmm13=int6464#14
11207# asm 2: pxor  <xmm2=%xmm2,<xmm13=%xmm13
11208pxor  %xmm2,%xmm13
11209
11210# qhasm:         xmm4 ^= xmm8
11211# asm 1: pxor  <xmm8=int6464#9,<xmm4=int6464#1
11212# asm 2: pxor  <xmm8=%xmm8,<xmm4=%xmm0
11213pxor  %xmm8,%xmm0
11214
11215# qhasm:         xmm0 ^= xmm11
11216# asm 1: pxor  <xmm11=int6464#12,<xmm0=int6464#2
11217# asm 2: pxor  <xmm11=%xmm11,<xmm0=%xmm1
11218pxor  %xmm11,%xmm1
11219
11220# qhasm:         xmm7 ^= xmm5
11221# asm 1: pxor  <xmm5=int6464#8,<xmm7=int6464#6
11222# asm 2: pxor  <xmm5=%xmm7,<xmm7=%xmm5
11223pxor  %xmm7,%xmm5
11224
11225# qhasm:         xmm6 ^= xmm1
11226# asm 1: pxor  <xmm1=int6464#5,<xmm6=int6464#4
11227# asm 2: pxor  <xmm1=%xmm4,<xmm6=%xmm3
11228pxor  %xmm4,%xmm3
11229
11230# qhasm:           xmm3 = xmm7
11231# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
11232# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
11233movdqa %xmm5,%xmm2
11234
11235# qhasm:           xmm3 ^= xmm6
11236# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
11237# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
11238pxor  %xmm3,%xmm2
11239
11240# qhasm:           xmm3 &= xmm4
11241# asm 1: pand  <xmm4=int6464#1,<xmm3=int6464#3
11242# asm 2: pand  <xmm4=%xmm0,<xmm3=%xmm2
11243pand  %xmm0,%xmm2
11244
11245# qhasm:           xmm4 ^= xmm0
11246# asm 1: pxor  <xmm0=int6464#2,<xmm4=int6464#1
11247# asm 2: pxor  <xmm0=%xmm1,<xmm4=%xmm0
11248pxor  %xmm1,%xmm0
11249
11250# qhasm:           xmm4 &= xmm6
11251# asm 1: pand  <xmm6=int6464#4,<xmm4=int6464#1
11252# asm 2: pand  <xmm6=%xmm3,<xmm4=%xmm0
11253pand  %xmm3,%xmm0
11254
11255# qhasm:           xmm0 &= xmm7
11256# asm 1: pand  <xmm7=int6464#6,<xmm0=int6464#2
11257# asm 2: pand  <xmm7=%xmm5,<xmm0=%xmm1
11258pand  %xmm5,%xmm1
11259
11260# qhasm:           xmm0 ^= xmm4
11261# asm 1: pxor  <xmm4=int6464#1,<xmm0=int6464#2
11262# asm 2: pxor  <xmm4=%xmm0,<xmm0=%xmm1
11263pxor  %xmm0,%xmm1
11264
11265# qhasm:           xmm4 ^= xmm3
11266# asm 1: pxor  <xmm3=int6464#3,<xmm4=int6464#1
11267# asm 2: pxor  <xmm3=%xmm2,<xmm4=%xmm0
11268pxor  %xmm2,%xmm0
11269
11270# qhasm:           xmm2 = xmm5
11271# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
11272# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
11273movdqa %xmm7,%xmm2
11274
11275# qhasm:           xmm2 ^= xmm1
11276# asm 1: pxor  <xmm1=int6464#5,<xmm2=int6464#3
11277# asm 2: pxor  <xmm1=%xmm4,<xmm2=%xmm2
11278pxor  %xmm4,%xmm2
11279
11280# qhasm:           xmm2 &= xmm8
11281# asm 1: pand  <xmm8=int6464#9,<xmm2=int6464#3
11282# asm 2: pand  <xmm8=%xmm8,<xmm2=%xmm2
11283pand  %xmm8,%xmm2
11284
11285# qhasm:           xmm8 ^= xmm11
11286# asm 1: pxor  <xmm11=int6464#12,<xmm8=int6464#9
11287# asm 2: pxor  <xmm11=%xmm11,<xmm8=%xmm8
11288pxor  %xmm11,%xmm8
11289
11290# qhasm:           xmm8 &= xmm1
11291# asm 1: pand  <xmm1=int6464#5,<xmm8=int6464#9
11292# asm 2: pand  <xmm1=%xmm4,<xmm8=%xmm8
11293pand  %xmm4,%xmm8
11294
11295# qhasm:           xmm11 &= xmm5
11296# asm 1: pand  <xmm5=int6464#8,<xmm11=int6464#12
11297# asm 2: pand  <xmm5=%xmm7,<xmm11=%xmm11
11298pand  %xmm7,%xmm11
11299
11300# qhasm:           xmm8 ^= xmm11
11301# asm 1: pxor  <xmm11=int6464#12,<xmm8=int6464#9
11302# asm 2: pxor  <xmm11=%xmm11,<xmm8=%xmm8
11303pxor  %xmm11,%xmm8
11304
11305# qhasm:           xmm11 ^= xmm2
11306# asm 1: pxor  <xmm2=int6464#3,<xmm11=int6464#12
11307# asm 2: pxor  <xmm2=%xmm2,<xmm11=%xmm11
11308pxor  %xmm2,%xmm11
11309
11310# qhasm:         xmm14 ^= xmm4
11311# asm 1: pxor  <xmm4=int6464#1,<xmm14=int6464#15
11312# asm 2: pxor  <xmm4=%xmm0,<xmm14=%xmm14
11313pxor  %xmm0,%xmm14
11314
11315# qhasm:         xmm8 ^= xmm4
11316# asm 1: pxor  <xmm4=int6464#1,<xmm8=int6464#9
11317# asm 2: pxor  <xmm4=%xmm0,<xmm8=%xmm8
11318pxor  %xmm0,%xmm8
11319
11320# qhasm:         xmm13 ^= xmm0
11321# asm 1: pxor  <xmm0=int6464#2,<xmm13=int6464#14
11322# asm 2: pxor  <xmm0=%xmm1,<xmm13=%xmm13
11323pxor  %xmm1,%xmm13
11324
11325# qhasm:         xmm11 ^= xmm0
11326# asm 1: pxor  <xmm0=int6464#2,<xmm11=int6464#12
11327# asm 2: pxor  <xmm0=%xmm1,<xmm11=%xmm11
11328pxor  %xmm1,%xmm11
11329
11330# qhasm:         xmm4 = xmm15
11331# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
11332# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
11333movdqa %xmm15,%xmm0
11334
11335# qhasm:         xmm0 = xmm9
11336# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
11337# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
11338movdqa %xmm9,%xmm1
11339
11340# qhasm:         xmm4 ^= xmm12
11341# asm 1: pxor  <xmm12=int6464#13,<xmm4=int6464#1
11342# asm 2: pxor  <xmm12=%xmm12,<xmm4=%xmm0
11343pxor  %xmm12,%xmm0
11344
11345# qhasm:         xmm0 ^= xmm10
11346# asm 1: pxor  <xmm10=int6464#11,<xmm0=int6464#2
11347# asm 2: pxor  <xmm10=%xmm10,<xmm0=%xmm1
11348pxor  %xmm10,%xmm1
11349
11350# qhasm:           xmm3 = xmm7
11351# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
11352# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
11353movdqa %xmm5,%xmm2
11354
11355# qhasm:           xmm3 ^= xmm6
11356# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
11357# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
11358pxor  %xmm3,%xmm2
11359
11360# qhasm:           xmm3 &= xmm4
11361# asm 1: pand  <xmm4=int6464#1,<xmm3=int6464#3
11362# asm 2: pand  <xmm4=%xmm0,<xmm3=%xmm2
11363pand  %xmm0,%xmm2
11364
11365# qhasm:           xmm4 ^= xmm0
11366# asm 1: pxor  <xmm0=int6464#2,<xmm4=int6464#1
11367# asm 2: pxor  <xmm0=%xmm1,<xmm4=%xmm0
11368pxor  %xmm1,%xmm0
11369
11370# qhasm:           xmm4 &= xmm6
11371# asm 1: pand  <xmm6=int6464#4,<xmm4=int6464#1
11372# asm 2: pand  <xmm6=%xmm3,<xmm4=%xmm0
11373pand  %xmm3,%xmm0
11374
11375# qhasm:           xmm0 &= xmm7
11376# asm 1: pand  <xmm7=int6464#6,<xmm0=int6464#2
11377# asm 2: pand  <xmm7=%xmm5,<xmm0=%xmm1
11378pand  %xmm5,%xmm1
11379
11380# qhasm:           xmm0 ^= xmm4
11381# asm 1: pxor  <xmm4=int6464#1,<xmm0=int6464#2
11382# asm 2: pxor  <xmm4=%xmm0,<xmm0=%xmm1
11383pxor  %xmm0,%xmm1
11384
11385# qhasm:           xmm4 ^= xmm3
11386# asm 1: pxor  <xmm3=int6464#3,<xmm4=int6464#1
11387# asm 2: pxor  <xmm3=%xmm2,<xmm4=%xmm0
11388pxor  %xmm2,%xmm0
11389
11390# qhasm:           xmm2 = xmm5
11391# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
11392# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
11393movdqa %xmm7,%xmm2
11394
11395# qhasm:           xmm2 ^= xmm1
11396# asm 1: pxor  <xmm1=int6464#5,<xmm2=int6464#3
11397# asm 2: pxor  <xmm1=%xmm4,<xmm2=%xmm2
11398pxor  %xmm4,%xmm2
11399
11400# qhasm:           xmm2 &= xmm12
11401# asm 1: pand  <xmm12=int6464#13,<xmm2=int6464#3
11402# asm 2: pand  <xmm12=%xmm12,<xmm2=%xmm2
11403pand  %xmm12,%xmm2
11404
11405# qhasm:           xmm12 ^= xmm10
11406# asm 1: pxor  <xmm10=int6464#11,<xmm12=int6464#13
11407# asm 2: pxor  <xmm10=%xmm10,<xmm12=%xmm12
11408pxor  %xmm10,%xmm12
11409
11410# qhasm:           xmm12 &= xmm1
11411# asm 1: pand  <xmm1=int6464#5,<xmm12=int6464#13
11412# asm 2: pand  <xmm1=%xmm4,<xmm12=%xmm12
11413pand  %xmm4,%xmm12
11414
11415# qhasm:           xmm10 &= xmm5
11416# asm 1: pand  <xmm5=int6464#8,<xmm10=int6464#11
11417# asm 2: pand  <xmm5=%xmm7,<xmm10=%xmm10
11418pand  %xmm7,%xmm10
11419
11420# qhasm:           xmm12 ^= xmm10
11421# asm 1: pxor  <xmm10=int6464#11,<xmm12=int6464#13
11422# asm 2: pxor  <xmm10=%xmm10,<xmm12=%xmm12
11423pxor  %xmm10,%xmm12
11424
11425# qhasm:           xmm10 ^= xmm2
11426# asm 1: pxor  <xmm2=int6464#3,<xmm10=int6464#11
11427# asm 2: pxor  <xmm2=%xmm2,<xmm10=%xmm10
11428pxor  %xmm2,%xmm10
11429
11430# qhasm:         xmm7 ^= xmm5
11431# asm 1: pxor  <xmm5=int6464#8,<xmm7=int6464#6
11432# asm 2: pxor  <xmm5=%xmm7,<xmm7=%xmm5
11433pxor  %xmm7,%xmm5
11434
11435# qhasm:         xmm6 ^= xmm1
11436# asm 1: pxor  <xmm1=int6464#5,<xmm6=int6464#4
11437# asm 2: pxor  <xmm1=%xmm4,<xmm6=%xmm3
11438pxor  %xmm4,%xmm3
11439
11440# qhasm:           xmm3 = xmm7
11441# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
11442# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
11443movdqa %xmm5,%xmm2
11444
11445# qhasm:           xmm3 ^= xmm6
11446# asm 1: pxor  <xmm6=int6464#4,<xmm3=int6464#3
11447# asm 2: pxor  <xmm6=%xmm3,<xmm3=%xmm2
11448pxor  %xmm3,%xmm2
11449
11450# qhasm:           xmm3 &= xmm15
11451# asm 1: pand  <xmm15=int6464#16,<xmm3=int6464#3
11452# asm 2: pand  <xmm15=%xmm15,<xmm3=%xmm2
11453pand  %xmm15,%xmm2
11454
11455# qhasm:           xmm15 ^= xmm9
11456# asm 1: pxor  <xmm9=int6464#10,<xmm15=int6464#16
11457# asm 2: pxor  <xmm9=%xmm9,<xmm15=%xmm15
11458pxor  %xmm9,%xmm15
11459
11460# qhasm:           xmm15 &= xmm6
11461# asm 1: pand  <xmm6=int6464#4,<xmm15=int6464#16
11462# asm 2: pand  <xmm6=%xmm3,<xmm15=%xmm15
11463pand  %xmm3,%xmm15
11464
11465# qhasm:           xmm9 &= xmm7
11466# asm 1: pand  <xmm7=int6464#6,<xmm9=int6464#10
11467# asm 2: pand  <xmm7=%xmm5,<xmm9=%xmm9
11468pand  %xmm5,%xmm9
11469
11470# qhasm:           xmm15 ^= xmm9
11471# asm 1: pxor  <xmm9=int6464#10,<xmm15=int6464#16
11472# asm 2: pxor  <xmm9=%xmm9,<xmm15=%xmm15
11473pxor  %xmm9,%xmm15
11474
11475# qhasm:           xmm9 ^= xmm3
11476# asm 1: pxor  <xmm3=int6464#3,<xmm9=int6464#10
11477# asm 2: pxor  <xmm3=%xmm2,<xmm9=%xmm9
11478pxor  %xmm2,%xmm9
11479
11480# qhasm:         xmm15 ^= xmm4
11481# asm 1: pxor  <xmm4=int6464#1,<xmm15=int6464#16
11482# asm 2: pxor  <xmm4=%xmm0,<xmm15=%xmm15
11483pxor  %xmm0,%xmm15
11484
11485# qhasm:         xmm12 ^= xmm4
11486# asm 1: pxor  <xmm4=int6464#1,<xmm12=int6464#13
11487# asm 2: pxor  <xmm4=%xmm0,<xmm12=%xmm12
11488pxor  %xmm0,%xmm12
11489
11490# qhasm:         xmm9 ^= xmm0
11491# asm 1: pxor  <xmm0=int6464#2,<xmm9=int6464#10
11492# asm 2: pxor  <xmm0=%xmm1,<xmm9=%xmm9
11493pxor  %xmm1,%xmm9
11494
11495# qhasm:         xmm10 ^= xmm0
11496# asm 1: pxor  <xmm0=int6464#2,<xmm10=int6464#11
11497# asm 2: pxor  <xmm0=%xmm1,<xmm10=%xmm10
11498pxor  %xmm1,%xmm10
11499
11500# qhasm:       xmm15 ^= xmm8
11501# asm 1: pxor  <xmm8=int6464#9,<xmm15=int6464#16
11502# asm 2: pxor  <xmm8=%xmm8,<xmm15=%xmm15
11503pxor  %xmm8,%xmm15
11504
11505# qhasm:       xmm9 ^= xmm14
11506# asm 1: pxor  <xmm14=int6464#15,<xmm9=int6464#10
11507# asm 2: pxor  <xmm14=%xmm14,<xmm9=%xmm9
11508pxor  %xmm14,%xmm9
11509
11510# qhasm:       xmm12 ^= xmm15
11511# asm 1: pxor  <xmm15=int6464#16,<xmm12=int6464#13
11512# asm 2: pxor  <xmm15=%xmm15,<xmm12=%xmm12
11513pxor  %xmm15,%xmm12
11514
11515# qhasm:       xmm14 ^= xmm8
11516# asm 1: pxor  <xmm8=int6464#9,<xmm14=int6464#15
11517# asm 2: pxor  <xmm8=%xmm8,<xmm14=%xmm14
11518pxor  %xmm8,%xmm14
11519
11520# qhasm:       xmm8 ^= xmm9
11521# asm 1: pxor  <xmm9=int6464#10,<xmm8=int6464#9
11522# asm 2: pxor  <xmm9=%xmm9,<xmm8=%xmm8
11523pxor  %xmm9,%xmm8
11524
11525# qhasm:       xmm9 ^= xmm13
11526# asm 1: pxor  <xmm13=int6464#14,<xmm9=int6464#10
11527# asm 2: pxor  <xmm13=%xmm13,<xmm9=%xmm9
11528pxor  %xmm13,%xmm9
11529
11530# qhasm:       xmm13 ^= xmm10
11531# asm 1: pxor  <xmm10=int6464#11,<xmm13=int6464#14
11532# asm 2: pxor  <xmm10=%xmm10,<xmm13=%xmm13
11533pxor  %xmm10,%xmm13
11534
11535# qhasm:       xmm12 ^= xmm13
11536# asm 1: pxor  <xmm13=int6464#14,<xmm12=int6464#13
11537# asm 2: pxor  <xmm13=%xmm13,<xmm12=%xmm12
11538pxor  %xmm13,%xmm12
11539
11540# qhasm:       xmm10 ^= xmm11
11541# asm 1: pxor  <xmm11=int6464#12,<xmm10=int6464#11
11542# asm 2: pxor  <xmm11=%xmm11,<xmm10=%xmm10
11543pxor  %xmm11,%xmm10
11544
11545# qhasm:       xmm11 ^= xmm13
11546# asm 1: pxor  <xmm13=int6464#14,<xmm11=int6464#12
11547# asm 2: pxor  <xmm13=%xmm13,<xmm11=%xmm11
11548pxor  %xmm13,%xmm11
11549
11550# qhasm:       xmm14 ^= xmm11
11551# asm 1: pxor  <xmm11=int6464#12,<xmm14=int6464#15
11552# asm 2: pxor  <xmm11=%xmm11,<xmm14=%xmm14
11553pxor  %xmm11,%xmm14
11554
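# The eight pxor loads below fold in the final round key. Each bitsliced
# round key occupies 8 x 16 = 128 bytes of the expanded-key buffer c, so
# offset 1280 = 10 x 128 is consistent with this being round key 10, the
# last AddRoundKey of AES-128.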
11555# qhasm:   xmm8 ^= *(int128 *)(c + 1280)
11556# asm 1: pxor 1280(<c=int64#4),<xmm8=int6464#9
11557# asm 2: pxor 1280(<c=%rcx),<xmm8=%xmm8
11558pxor 1280(%rcx),%xmm8
11559
11560# qhasm:   xmm9 ^= *(int128 *)(c + 1296)
11561# asm 1: pxor 1296(<c=int64#4),<xmm9=int6464#10
11562# asm 2: pxor 1296(<c=%rcx),<xmm9=%xmm9
11563pxor 1296(%rcx),%xmm9
11564
11565# qhasm:   xmm12 ^= *(int128 *)(c + 1312)
11566# asm 1: pxor 1312(<c=int64#4),<xmm12=int6464#13
11567# asm 2: pxor 1312(<c=%rcx),<xmm12=%xmm12
11568pxor 1312(%rcx),%xmm12
11569
11570# qhasm:   xmm14 ^= *(int128 *)(c + 1328)
11571# asm 1: pxor 1328(<c=int64#4),<xmm14=int6464#15
11572# asm 2: pxor 1328(<c=%rcx),<xmm14=%xmm14
11573pxor 1328(%rcx),%xmm14
11574
11575# qhasm:   xmm11 ^= *(int128 *)(c + 1344)
11576# asm 1: pxor 1344(<c=int64#4),<xmm11=int6464#12
11577# asm 2: pxor 1344(<c=%rcx),<xmm11=%xmm11
11578pxor 1344(%rcx),%xmm11
11579
11580# qhasm:   xmm15 ^= *(int128 *)(c + 1360)
11581# asm 1: pxor 1360(<c=int64#4),<xmm15=int6464#16
11582# asm 2: pxor 1360(<c=%rcx),<xmm15=%xmm15
11583pxor 1360(%rcx),%xmm15
11584
11585# qhasm:   xmm10 ^= *(int128 *)(c + 1376)
11586# asm 1: pxor 1376(<c=int64#4),<xmm10=int6464#11
11587# asm 2: pxor 1376(<c=%rcx),<xmm10=%xmm10
11588pxor 1376(%rcx),%xmm10
11589
11590# qhasm:   xmm13 ^= *(int128 *)(c + 1392)
11591# asm 1: pxor 1392(<c=int64#4),<xmm13=int6464#14
11592# asm 2: pxor 1392(<c=%rcx),<xmm13=%xmm13
11593pxor 1392(%rcx),%xmm13
11594
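# From here down to the length check, the state is converted back from
# bitsliced form to byte order using the classic swapmove pattern with
# masks BS0 (0x55...), BS1 (0x33...) and BS2 (0x0f...) at shift distances
# 1, 2 and 4. A minimal C sketch of one step, assuming 64-bit lanes:
#
#     #include <stdint.h>
#
#     /* exchange the bits selected by mask between b and (a >> n) */
#     static inline void swapmove(uint64_t *a, uint64_t *b,
#                                 uint64_t mask, unsigned n)
#     {
#         uint64_t t = ((*a >> n) ^ *b) & mask;   /* bits to trade */
#         *b ^= t;
#         *a ^= t << n;
#     }
#
# Each movdqa/psrlq/pxor/pand/pxor/psllq/pxor group below is one such step;
# the first group is swapmove(&xmm10, &xmm13, BS0, 1).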
11595# qhasm:     xmm0 = xmm10
11596# asm 1: movdqa <xmm10=int6464#11,>xmm0=int6464#1
11597# asm 2: movdqa <xmm10=%xmm10,>xmm0=%xmm0
11598movdqa %xmm10,%xmm0
11599
11600# qhasm:     uint6464 xmm0 >>= 1
11601# asm 1: psrlq $1,<xmm0=int6464#1
11602# asm 2: psrlq $1,<xmm0=%xmm0
11603psrlq $1,%xmm0
11604
11605# qhasm:     xmm0 ^= xmm13
11606# asm 1: pxor  <xmm13=int6464#14,<xmm0=int6464#1
11607# asm 2: pxor  <xmm13=%xmm13,<xmm0=%xmm0
11608pxor  %xmm13,%xmm0
11609
11610# qhasm:     xmm0 &= BS0
11611# asm 1: pand  BS0,<xmm0=int6464#1
11612# asm 2: pand  BS0,<xmm0=%xmm0
11613pand  BS0,%xmm0
11614
11615# qhasm:     xmm13 ^= xmm0
11616# asm 1: pxor  <xmm0=int6464#1,<xmm13=int6464#14
11617# asm 2: pxor  <xmm0=%xmm0,<xmm13=%xmm13
11618pxor  %xmm0,%xmm13
11619
11620# qhasm:     uint6464 xmm0 <<= 1
11621# asm 1: psllq $1,<xmm0=int6464#1
11622# asm 2: psllq $1,<xmm0=%xmm0
11623psllq $1,%xmm0
11624
11625# qhasm:     xmm10 ^= xmm0
11626# asm 1: pxor  <xmm0=int6464#1,<xmm10=int6464#11
11627# asm 2: pxor  <xmm0=%xmm0,<xmm10=%xmm10
11628pxor  %xmm0,%xmm10
11629
11630# qhasm:     xmm0 = xmm11
11631# asm 1: movdqa <xmm11=int6464#12,>xmm0=int6464#1
11632# asm 2: movdqa <xmm11=%xmm11,>xmm0=%xmm0
11633movdqa %xmm11,%xmm0
11634
11635# qhasm:     uint6464 xmm0 >>= 1
11636# asm 1: psrlq $1,<xmm0=int6464#1
11637# asm 2: psrlq $1,<xmm0=%xmm0
11638psrlq $1,%xmm0
11639
11640# qhasm:     xmm0 ^= xmm15
11641# asm 1: pxor  <xmm15=int6464#16,<xmm0=int6464#1
11642# asm 2: pxor  <xmm15=%xmm15,<xmm0=%xmm0
11643pxor  %xmm15,%xmm0
11644
11645# qhasm:     xmm0 &= BS0
11646# asm 1: pand  BS0,<xmm0=int6464#1
11647# asm 2: pand  BS0,<xmm0=%xmm0
11648pand  BS0,%xmm0
11649
11650# qhasm:     xmm15 ^= xmm0
11651# asm 1: pxor  <xmm0=int6464#1,<xmm15=int6464#16
11652# asm 2: pxor  <xmm0=%xmm0,<xmm15=%xmm15
11653pxor  %xmm0,%xmm15
11654
11655# qhasm:     uint6464 xmm0 <<= 1
11656# asm 1: psllq $1,<xmm0=int6464#1
11657# asm 2: psllq $1,<xmm0=%xmm0
11658psllq $1,%xmm0
11659
11660# qhasm:     xmm11 ^= xmm0
11661# asm 1: pxor  <xmm0=int6464#1,<xmm11=int6464#12
11662# asm 2: pxor  <xmm0=%xmm0,<xmm11=%xmm11
11663pxor  %xmm0,%xmm11
11664
11665# qhasm:     xmm0 = xmm12
11666# asm 1: movdqa <xmm12=int6464#13,>xmm0=int6464#1
11667# asm 2: movdqa <xmm12=%xmm12,>xmm0=%xmm0
11668movdqa %xmm12,%xmm0
11669
11670# qhasm:     uint6464 xmm0 >>= 1
11671# asm 1: psrlq $1,<xmm0=int6464#1
11672# asm 2: psrlq $1,<xmm0=%xmm0
11673psrlq $1,%xmm0
11674
11675# qhasm:     xmm0 ^= xmm14
11676# asm 1: pxor  <xmm14=int6464#15,<xmm0=int6464#1
11677# asm 2: pxor  <xmm14=%xmm14,<xmm0=%xmm0
11678pxor  %xmm14,%xmm0
11679
11680# qhasm:     xmm0 &= BS0
11681# asm 1: pand  BS0,<xmm0=int6464#1
11682# asm 2: pand  BS0,<xmm0=%xmm0
11683pand  BS0,%xmm0
11684
11685# qhasm:     xmm14 ^= xmm0
11686# asm 1: pxor  <xmm0=int6464#1,<xmm14=int6464#15
11687# asm 2: pxor  <xmm0=%xmm0,<xmm14=%xmm14
11688pxor  %xmm0,%xmm14
11689
11690# qhasm:     uint6464 xmm0 <<= 1
11691# asm 1: psllq $1,<xmm0=int6464#1
11692# asm 2: psllq $1,<xmm0=%xmm0
11693psllq $1,%xmm0
11694
11695# qhasm:     xmm12 ^= xmm0
11696# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#13
11697# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm12
11698pxor  %xmm0,%xmm12
11699
11700# qhasm:     xmm0 = xmm8
11701# asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1
11702# asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0
11703movdqa %xmm8,%xmm0
11704
11705# qhasm:     uint6464 xmm0 >>= 1
11706# asm 1: psrlq $1,<xmm0=int6464#1
11707# asm 2: psrlq $1,<xmm0=%xmm0
11708psrlq $1,%xmm0
11709
11710# qhasm:     xmm0 ^= xmm9
11711# asm 1: pxor  <xmm9=int6464#10,<xmm0=int6464#1
11712# asm 2: pxor  <xmm9=%xmm9,<xmm0=%xmm0
11713pxor  %xmm9,%xmm0
11714
11715# qhasm:     xmm0 &= BS0
11716# asm 1: pand  BS0,<xmm0=int6464#1
11717# asm 2: pand  BS0,<xmm0=%xmm0
11718pand  BS0,%xmm0
11719
11720# qhasm:     xmm9 ^= xmm0
11721# asm 1: pxor  <xmm0=int6464#1,<xmm9=int6464#10
11722# asm 2: pxor  <xmm0=%xmm0,<xmm9=%xmm9
11723pxor  %xmm0,%xmm9
11724
11725# qhasm:     uint6464 xmm0 <<= 1
11726# asm 1: psllq $1,<xmm0=int6464#1
11727# asm 2: psllq $1,<xmm0=%xmm0
11728psllq $1,%xmm0
11729
11730# qhasm:     xmm8 ^= xmm0
11731# asm 1: pxor  <xmm0=int6464#1,<xmm8=int6464#9
11732# asm 2: pxor  <xmm0=%xmm0,<xmm8=%xmm8
11733pxor  %xmm0,%xmm8
11734
11735# qhasm:     xmm0 = xmm15
11736# asm 1: movdqa <xmm15=int6464#16,>xmm0=int6464#1
11737# asm 2: movdqa <xmm15=%xmm15,>xmm0=%xmm0
11738movdqa %xmm15,%xmm0
11739
11740# qhasm:     uint6464 xmm0 >>= 2
11741# asm 1: psrlq $2,<xmm0=int6464#1
11742# asm 2: psrlq $2,<xmm0=%xmm0
11743psrlq $2,%xmm0
11744
11745# qhasm:     xmm0 ^= xmm13
11746# asm 1: pxor  <xmm13=int6464#14,<xmm0=int6464#1
11747# asm 2: pxor  <xmm13=%xmm13,<xmm0=%xmm0
11748pxor  %xmm13,%xmm0
11749
11750# qhasm:     xmm0 &= BS1
11751# asm 1: pand  BS1,<xmm0=int6464#1
11752# asm 2: pand  BS1,<xmm0=%xmm0
11753pand  BS1,%xmm0
11754
11755# qhasm:     xmm13 ^= xmm0
11756# asm 1: pxor  <xmm0=int6464#1,<xmm13=int6464#14
11757# asm 2: pxor  <xmm0=%xmm0,<xmm13=%xmm13
11758pxor  %xmm0,%xmm13
11759
11760# qhasm:     uint6464 xmm0 <<= 2
11761# asm 1: psllq $2,<xmm0=int6464#1
11762# asm 2: psllq $2,<xmm0=%xmm0
11763psllq $2,%xmm0
11764
11765# qhasm:     xmm15 ^= xmm0
11766# asm 1: pxor  <xmm0=int6464#1,<xmm15=int6464#16
11767# asm 2: pxor  <xmm0=%xmm0,<xmm15=%xmm15
11768pxor  %xmm0,%xmm15
11769
11770# qhasm:     xmm0 = xmm11
11771# asm 1: movdqa <xmm11=int6464#12,>xmm0=int6464#1
11772# asm 2: movdqa <xmm11=%xmm11,>xmm0=%xmm0
11773movdqa %xmm11,%xmm0
11774
11775# qhasm:     uint6464 xmm0 >>= 2
11776# asm 1: psrlq $2,<xmm0=int6464#1
11777# asm 2: psrlq $2,<xmm0=%xmm0
11778psrlq $2,%xmm0
11779
11780# qhasm:     xmm0 ^= xmm10
11781# asm 1: pxor  <xmm10=int6464#11,<xmm0=int6464#1
11782# asm 2: pxor  <xmm10=%xmm10,<xmm0=%xmm0
11783pxor  %xmm10,%xmm0
11784
11785# qhasm:     xmm0 &= BS1
11786# asm 1: pand  BS1,<xmm0=int6464#1
11787# asm 2: pand  BS1,<xmm0=%xmm0
11788pand  BS1,%xmm0
11789
11790# qhasm:     xmm10 ^= xmm0
11791# asm 1: pxor  <xmm0=int6464#1,<xmm10=int6464#11
11792# asm 2: pxor  <xmm0=%xmm0,<xmm10=%xmm10
11793pxor  %xmm0,%xmm10
11794
11795# qhasm:     uint6464 xmm0 <<= 2
11796# asm 1: psllq $2,<xmm0=int6464#1
11797# asm 2: psllq $2,<xmm0=%xmm0
11798psllq $2,%xmm0
11799
11800# qhasm:     xmm11 ^= xmm0
11801# asm 1: pxor  <xmm0=int6464#1,<xmm11=int6464#12
11802# asm 2: pxor  <xmm0=%xmm0,<xmm11=%xmm11
11803pxor  %xmm0,%xmm11
11804
11805# qhasm:     xmm0 = xmm9
11806# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#1
11807# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm0
11808movdqa %xmm9,%xmm0
11809
11810# qhasm:     uint6464 xmm0 >>= 2
11811# asm 1: psrlq $2,<xmm0=int6464#1
11812# asm 2: psrlq $2,<xmm0=%xmm0
11813psrlq $2,%xmm0
11814
11815# qhasm:     xmm0 ^= xmm14
11816# asm 1: pxor  <xmm14=int6464#15,<xmm0=int6464#1
11817# asm 2: pxor  <xmm14=%xmm14,<xmm0=%xmm0
11818pxor  %xmm14,%xmm0
11819
11820# qhasm:     xmm0 &= BS1
11821# asm 1: pand  BS1,<xmm0=int6464#1
11822# asm 2: pand  BS1,<xmm0=%xmm0
11823pand  BS1,%xmm0
11824
11825# qhasm:     xmm14 ^= xmm0
11826# asm 1: pxor  <xmm0=int6464#1,<xmm14=int6464#15
11827# asm 2: pxor  <xmm0=%xmm0,<xmm14=%xmm14
11828pxor  %xmm0,%xmm14
11829
11830# qhasm:     uint6464 xmm0 <<= 2
11831# asm 1: psllq $2,<xmm0=int6464#1
11832# asm 2: psllq $2,<xmm0=%xmm0
11833psllq $2,%xmm0
11834
11835# qhasm:     xmm9 ^= xmm0
11836# asm 1: pxor  <xmm0=int6464#1,<xmm9=int6464#10
11837# asm 2: pxor  <xmm0=%xmm0,<xmm9=%xmm9
11838pxor  %xmm0,%xmm9
11839
11840# qhasm:     xmm0 = xmm8
11841# asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1
11842# asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0
11843movdqa %xmm8,%xmm0
11844
11845# qhasm:     uint6464 xmm0 >>= 2
11846# asm 1: psrlq $2,<xmm0=int6464#1
11847# asm 2: psrlq $2,<xmm0=%xmm0
11848psrlq $2,%xmm0
11849
11850# qhasm:     xmm0 ^= xmm12
11851# asm 1: pxor  <xmm12=int6464#13,<xmm0=int6464#1
11852# asm 2: pxor  <xmm12=%xmm12,<xmm0=%xmm0
11853pxor  %xmm12,%xmm0
11854
11855# qhasm:     xmm0 &= BS1
11856# asm 1: pand  BS1,<xmm0=int6464#1
11857# asm 2: pand  BS1,<xmm0=%xmm0
11858pand  BS1,%xmm0
11859
11860# qhasm:     xmm12 ^= xmm0
11861# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#13
11862# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm12
11863pxor  %xmm0,%xmm12
11864
11865# qhasm:     uint6464 xmm0 <<= 2
11866# asm 1: psllq $2,<xmm0=int6464#1
11867# asm 2: psllq $2,<xmm0=%xmm0
11868psllq $2,%xmm0
11869
11870# qhasm:     xmm8 ^= xmm0
11871# asm 1: pxor  <xmm0=int6464#1,<xmm8=int6464#9
11872# asm 2: pxor  <xmm0=%xmm0,<xmm8=%xmm8
11873pxor  %xmm0,%xmm8
11874
11875# qhasm:     xmm0 = xmm14
11876# asm 1: movdqa <xmm14=int6464#15,>xmm0=int6464#1
11877# asm 2: movdqa <xmm14=%xmm14,>xmm0=%xmm0
11878movdqa %xmm14,%xmm0
11879
11880# qhasm:     uint6464 xmm0 >>= 4
11881# asm 1: psrlq $4,<xmm0=int6464#1
11882# asm 2: psrlq $4,<xmm0=%xmm0
11883psrlq $4,%xmm0
11884
11885# qhasm:     xmm0 ^= xmm13
11886# asm 1: pxor  <xmm13=int6464#14,<xmm0=int6464#1
11887# asm 2: pxor  <xmm13=%xmm13,<xmm0=%xmm0
11888pxor  %xmm13,%xmm0
11889
11890# qhasm:     xmm0 &= BS2
11891# asm 1: pand  BS2,<xmm0=int6464#1
11892# asm 2: pand  BS2,<xmm0=%xmm0
11893pand  BS2,%xmm0
11894
11895# qhasm:     xmm13 ^= xmm0
11896# asm 1: pxor  <xmm0=int6464#1,<xmm13=int6464#14
11897# asm 2: pxor  <xmm0=%xmm0,<xmm13=%xmm13
11898pxor  %xmm0,%xmm13
11899
11900# qhasm:     uint6464 xmm0 <<= 4
11901# asm 1: psllq $4,<xmm0=int6464#1
11902# asm 2: psllq $4,<xmm0=%xmm0
11903psllq $4,%xmm0
11904
11905# qhasm:     xmm14 ^= xmm0
11906# asm 1: pxor  <xmm0=int6464#1,<xmm14=int6464#15
11907# asm 2: pxor  <xmm0=%xmm0,<xmm14=%xmm14
11908pxor  %xmm0,%xmm14
11909
11910# qhasm:     xmm0 = xmm12
11911# asm 1: movdqa <xmm12=int6464#13,>xmm0=int6464#1
11912# asm 2: movdqa <xmm12=%xmm12,>xmm0=%xmm0
11913movdqa %xmm12,%xmm0
11914
11915# qhasm:     uint6464 xmm0 >>= 4
11916# asm 1: psrlq $4,<xmm0=int6464#1
11917# asm 2: psrlq $4,<xmm0=%xmm0
11918psrlq $4,%xmm0
11919
11920# qhasm:     xmm0 ^= xmm10
11921# asm 1: pxor  <xmm10=int6464#11,<xmm0=int6464#1
11922# asm 2: pxor  <xmm10=%xmm10,<xmm0=%xmm0
11923pxor  %xmm10,%xmm0
11924
11925# qhasm:     xmm0 &= BS2
11926# asm 1: pand  BS2,<xmm0=int6464#1
11927# asm 2: pand  BS2,<xmm0=%xmm0
11928pand  BS2,%xmm0
11929
11930# qhasm:     xmm10 ^= xmm0
11931# asm 1: pxor  <xmm0=int6464#1,<xmm10=int6464#11
11932# asm 2: pxor  <xmm0=%xmm0,<xmm10=%xmm10
11933pxor  %xmm0,%xmm10
11934
11935# qhasm:     uint6464 xmm0 <<= 4
11936# asm 1: psllq $4,<xmm0=int6464#1
11937# asm 2: psllq $4,<xmm0=%xmm0
11938psllq $4,%xmm0
11939
11940# qhasm:     xmm12 ^= xmm0
11941# asm 1: pxor  <xmm0=int6464#1,<xmm12=int6464#13
11942# asm 2: pxor  <xmm0=%xmm0,<xmm12=%xmm12
11943pxor  %xmm0,%xmm12
11944
11945# qhasm:     xmm0 = xmm9
11946# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#1
11947# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm0
11948movdqa %xmm9,%xmm0
11949
11950# qhasm:     uint6464 xmm0 >>= 4
11951# asm 1: psrlq $4,<xmm0=int6464#1
11952# asm 2: psrlq $4,<xmm0=%xmm0
11953psrlq $4,%xmm0
11954
11955# qhasm:     xmm0 ^= xmm15
11956# asm 1: pxor  <xmm15=int6464#16,<xmm0=int6464#1
11957# asm 2: pxor  <xmm15=%xmm15,<xmm0=%xmm0
11958pxor  %xmm15,%xmm0
11959
11960# qhasm:     xmm0 &= BS2
11961# asm 1: pand  BS2,<xmm0=int6464#1
11962# asm 2: pand  BS2,<xmm0=%xmm0
11963pand  BS2,%xmm0
11964
11965# qhasm:     xmm15 ^= xmm0
11966# asm 1: pxor  <xmm0=int6464#1,<xmm15=int6464#16
11967# asm 2: pxor  <xmm0=%xmm0,<xmm15=%xmm15
11968pxor  %xmm0,%xmm15
11969
11970# qhasm:     uint6464 xmm0 <<= 4
11971# asm 1: psllq $4,<xmm0=int6464#1
11972# asm 2: psllq $4,<xmm0=%xmm0
11973psllq $4,%xmm0
11974
11975# qhasm:     xmm9 ^= xmm0
11976# asm 1: pxor  <xmm0=int6464#1,<xmm9=int6464#10
11977# asm 2: pxor  <xmm0=%xmm0,<xmm9=%xmm9
11978pxor  %xmm0,%xmm9
11979
11980# qhasm:     xmm0 = xmm8
11981# asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1
11982# asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0
11983movdqa %xmm8,%xmm0
11984
11985# qhasm:     uint6464 xmm0 >>= 4
11986# asm 1: psrlq $4,<xmm0=int6464#1
11987# asm 2: psrlq $4,<xmm0=%xmm0
11988psrlq $4,%xmm0
11989
11990# qhasm:     xmm0 ^= xmm11
11991# asm 1: pxor  <xmm11=int6464#12,<xmm0=int6464#1
11992# asm 2: pxor  <xmm11=%xmm11,<xmm0=%xmm0
11993pxor  %xmm11,%xmm0
11994
11995# qhasm:     xmm0 &= BS2
11996# asm 1: pand  BS2,<xmm0=int6464#1
11997# asm 2: pand  BS2,<xmm0=%xmm0
11998pand  BS2,%xmm0
11999
12000# qhasm:     xmm11 ^= xmm0
12001# asm 1: pxor  <xmm0=int6464#1,<xmm11=int6464#12
12002# asm 2: pxor  <xmm0=%xmm0,<xmm11=%xmm11
12003pxor  %xmm0,%xmm11
12004
12005# qhasm:     uint6464 xmm0 <<= 4
12006# asm 1: psllq $4,<xmm0=int6464#1
12007# asm 2: psllq $4,<xmm0=%xmm0
12008psllq $4,%xmm0
12009
12010# qhasm:     xmm8 ^= xmm0
12011# asm 1: pxor  <xmm0=int6464#1,<xmm8=int6464#9
12012# asm 2: pxor  <xmm0=%xmm0,<xmm8=%xmm8
12013pxor  %xmm0,%xmm8
12014
12015# qhasm: unsigned<? =? len-128
12016# asm 1: cmp  $128,<len=int64#2
12017# asm 2: cmp  $128,<len=%rsi
12018cmp  $128,%rsi
12019# comment:fp stack unchanged by jump
12020
12021# qhasm: goto partial if unsigned<
12022jb ._partial
12023# comment:fp stack unchanged by jump
12024
12025# qhasm: goto full if =
12026je ._full
12027
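# The comparison above dispatches three ways on the remaining length: fewer
# than 128 bytes jumps to partial, exactly 128 jumps to full (final pass,
# no further looping), and more than 128 falls through to advance the
# counter and run another full pass of the loop.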
12028# qhasm: tmp = *(uint32 *)(np + 12)
12029# asm 1: movl   12(<np=int64#3),>tmp=int64#5d
12030# asm 2: movl   12(<np=%rdx),>tmp=%r8d
12031movl   12(%rdx),%r8d
12032
12033# qhasm: (uint32) bswap tmp
12034# asm 1: bswap <tmp=int64#5d
12035# asm 2: bswap <tmp=%r8d
12036bswap %r8d
12037
12038# qhasm: tmp += 8
12039# asm 1: add  $8,<tmp=int64#5
12040# asm 2: add  $8,<tmp=%r8
12041add  $8,%r8
12042
12043# qhasm: (uint32) bswap tmp
12044# asm 1: bswap <tmp=int64#5d
12045# asm 2: bswap <tmp=%r8d
12046bswap %r8d
12047
12048# qhasm: *(uint32 *)(np + 12) = tmp
12049# asm 1: movl   <tmp=int64#5d,12(<np=int64#3)
12050# asm 2: movl   <tmp=%r8d,12(<np=%rdx)
12051movl   %r8d,12(%rdx)
12052
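# A C sketch of the counter update just performed, assuming GCC/Clang's
# __builtin_bswap32: the last four nonce bytes hold a 32-bit big-endian
# block counter, advanced here by the 8 blocks (128 bytes) produced per
# pass of the loop.
#
#     #include <stdint.h>
#     #include <string.h>
#
#     static void ctr_advance(unsigned char *np, uint32_t blocks)
#     {
#         uint32_t ctr;
#         memcpy(&ctr, np + 12, 4);         /* movl 12(np), tmp  */
#         ctr = __builtin_bswap32(ctr);     /* bswap tmp         */
#         ctr += blocks;                    /* add blocks, tmp   */
#         ctr = __builtin_bswap32(ctr);     /* bswap tmp         */
#         memcpy(np + 12, &ctr, 4);         /* movl tmp, 12(np)  */
#     }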
12053# qhasm: *(int128 *) (outp + 0) = xmm8
12054# asm 1: movdqa <xmm8=int6464#9,0(<outp=int64#1)
12055# asm 2: movdqa <xmm8=%xmm8,0(<outp=%rdi)
12056movdqa %xmm8,0(%rdi)
12057
12058# qhasm: *(int128 *) (outp + 16) = xmm9
12059# asm 1: movdqa <xmm9=int6464#10,16(<outp=int64#1)
12060# asm 2: movdqa <xmm9=%xmm9,16(<outp=%rdi)
12061movdqa %xmm9,16(%rdi)
12062
12063# qhasm: *(int128 *) (outp + 32) = xmm12
12064# asm 1: movdqa <xmm12=int6464#13,32(<outp=int64#1)
12065# asm 2: movdqa <xmm12=%xmm12,32(<outp=%rdi)
12066movdqa %xmm12,32(%rdi)
12067
12068# qhasm: *(int128 *) (outp + 48) = xmm14
12069# asm 1: movdqa <xmm14=int6464#15,48(<outp=int64#1)
12070# asm 2: movdqa <xmm14=%xmm14,48(<outp=%rdi)
12071movdqa %xmm14,48(%rdi)
12072
12073# qhasm: *(int128 *) (outp + 64) = xmm11
12074# asm 1: movdqa <xmm11=int6464#12,64(<outp=int64#1)
12075# asm 2: movdqa <xmm11=%xmm11,64(<outp=%rdi)
12076movdqa %xmm11,64(%rdi)
12077
12078# qhasm: *(int128 *) (outp + 80) = xmm15
12079# asm 1: movdqa <xmm15=int6464#16,80(<outp=int64#1)
12080# asm 2: movdqa <xmm15=%xmm15,80(<outp=%rdi)
12081movdqa %xmm15,80(%rdi)
12082
12083# qhasm: *(int128 *) (outp + 96) = xmm10
12084# asm 1: movdqa <xmm10=int6464#11,96(<outp=int64#1)
12085# asm 2: movdqa <xmm10=%xmm10,96(<outp=%rdi)
12086movdqa %xmm10,96(%rdi)
12087
12088# qhasm: *(int128 *) (outp + 112) = xmm13
12089# asm 1: movdqa <xmm13=int6464#14,112(<outp=int64#1)
12090# asm 2: movdqa <xmm13=%xmm13,112(<outp=%rdi)
12091movdqa %xmm13,112(%rdi)
12092
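# The eight 16-byte keystream blocks are stored in the order xmm8, xmm9,
# xmm12, xmm14, xmm11, xmm15, xmm10, xmm13 rather than register order,
# apparently compensating for the register permutation accumulated by the
# round transformations above.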
12093# qhasm: len -= 128
12094# asm 1: sub  $128,<len=int64#2
12095# asm 2: sub  $128,<len=%rsi
12096sub  $128,%rsi
12097
12098# qhasm: outp += 128
12099# asm 1: add  $128,<outp=int64#1
12100# asm 2: add  $128,<outp=%rdi
12101add  $128,%rdi
12102# comment:fp stack unchanged by jump
12103
12104# qhasm: goto enc_block
12105jmp ._enc_block
12106
12107# qhasm: partial:
12108._partial:
12109
12110# qhasm: lensav = len
12111# asm 1: mov  <len=int64#2,>lensav=int64#4
12112# asm 2: mov  <len=%rsi,>lensav=%rcx
12113mov  %rsi,%rcx
12114
12115# qhasm: (uint32) len >>= 4
12116# asm 1: shr  $4,<len=int64#2d
12117# asm 2: shr  $4,<len=%esi
12118shr  $4,%esi
12119
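# After the shift, len holds the number of whole 16-byte blocks in the tail
# (used only to advance the counter below), while lensav keeps the exact
# byte count for the copy loop.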
12120# qhasm: tmp = *(uint32 *)(np + 12)
12121# asm 1: movl   12(<np=int64#3),>tmp=int64#5d
12122# asm 2: movl   12(<np=%rdx),>tmp=%r8d
12123movl   12(%rdx),%r8d
12124
12125# qhasm: (uint32) bswap tmp
12126# asm 1: bswap <tmp=int64#5d
12127# asm 2: bswap <tmp=%r8d
12128bswap %r8d
12129
12130# qhasm: tmp += len
12131# asm 1: add  <len=int64#2,<tmp=int64#5
12132# asm 2: add  <len=%rsi,<tmp=%r8
12133add  %rsi,%r8
12134
12135# qhasm: (uint32) bswap tmp
12136# asm 1: bswap <tmp=int64#5d
12137# asm 2: bswap <tmp=%r8d
12138bswap %r8d
12139
12140# qhasm: *(uint32 *)(np + 12) = tmp
12141# asm 1: movl   <tmp=int64#5d,12(<np=int64#3)
12142# asm 2: movl   <tmp=%r8d,12(<np=%rdx)
12143movl   %r8d,12(%rdx)
12144
12145# qhasm: blp = &bl
12146# asm 1: leaq <bl=stack1024#1,>blp=int64#2
12147# asm 2: leaq <bl=32(%rsp),>blp=%rsi
12148leaq 32(%rsp),%rsi
12149
12150# qhasm: *(int128 *)(blp + 0) = xmm8
12151# asm 1: movdqa <xmm8=int6464#9,0(<blp=int64#2)
12152# asm 2: movdqa <xmm8=%xmm8,0(<blp=%rsi)
12153movdqa %xmm8,0(%rsi)
12154
12155# qhasm: *(int128 *)(blp + 16) = xmm9
12156# asm 1: movdqa <xmm9=int6464#10,16(<blp=int64#2)
12157# asm 2: movdqa <xmm9=%xmm9,16(<blp=%rsi)
12158movdqa %xmm9,16(%rsi)
12159
12160# qhasm: *(int128 *)(blp + 32) = xmm12
12161# asm 1: movdqa <xmm12=int6464#13,32(<blp=int64#2)
12162# asm 2: movdqa <xmm12=%xmm12,32(<blp=%rsi)
12163movdqa %xmm12,32(%rsi)
12164
12165# qhasm: *(int128 *)(blp + 48) = xmm14
12166# asm 1: movdqa <xmm14=int6464#15,48(<blp=int64#2)
12167# asm 2: movdqa <xmm14=%xmm14,48(<blp=%rsi)
12168movdqa %xmm14,48(%rsi)
12169
12170# qhasm: *(int128 *)(blp + 64) = xmm11
12171# asm 1: movdqa <xmm11=int6464#12,64(<blp=int64#2)
12172# asm 2: movdqa <xmm11=%xmm11,64(<blp=%rsi)
12173movdqa %xmm11,64(%rsi)
12174
12175# qhasm: *(int128 *)(blp + 80) = xmm15
12176# asm 1: movdqa <xmm15=int6464#16,80(<blp=int64#2)
12177# asm 2: movdqa <xmm15=%xmm15,80(<blp=%rsi)
12178movdqa %xmm15,80(%rsi)
12179
12180# qhasm: *(int128 *)(blp + 96) = xmm10
12181# asm 1: movdqa <xmm10=int6464#11,96(<blp=int64#2)
12182# asm 2: movdqa <xmm10=%xmm10,96(<blp=%rsi)
12183movdqa %xmm10,96(%rsi)
12184
12185# qhasm: *(int128 *)(blp + 112) = xmm13
12186# asm 1: movdqa <xmm13=int6464#14,112(<blp=int64#2)
12187# asm 2: movdqa <xmm13=%xmm13,112(<blp=%rsi)
12188movdqa %xmm13,112(%rsi)
12189
12190# qhasm: bytes:
12191._bytes:
12192
12193# qhasm: =? lensav-0
12194# asm 1: cmp  $0,<lensav=int64#4
12195# asm 2: cmp  $0,<lensav=%rcx
12196cmp  $0,%rcx
12197# comment:fp stack unchanged by jump
12198
12199# qhasm: goto end if =
12200je ._end
12201
12202# qhasm: b = *(uint8 *)(blp + 0)
12203# asm 1: movzbq 0(<blp=int64#2),>b=int64#3
12204# asm 2: movzbq 0(<blp=%rsi),>b=%rdx
12205movzbq 0(%rsi),%rdx
12206
12207# qhasm: *(uint8 *)(outp + 0) = b
12208# asm 1: movb   <b=int64#3b,0(<outp=int64#1)
12209# asm 2: movb   <b=%dl,0(<outp=%rdi)
12210movb   %dl,0(%rdi)
12211
12212# qhasm: blp += 1
12213# asm 1: add  $1,<blp=int64#2
12214# asm 2: add  $1,<blp=%rsi
12215add  $1,%rsi
12216
# qhasm: outp += 1
12218# asm 1: add  $1,<outp=int64#1
12219# asm 2: add  $1,<outp=%rdi
12220add  $1,%rdi
12221
12222# qhasm: lensav -= 1
12223# asm 1: sub  $1,<lensav=int64#4
12224# asm 2: sub  $1,<lensav=%rcx
12225sub  $1,%rcx
12226# comment:fp stack unchanged by jump
12227
12228# qhasm: goto bytes
12229jmp ._bytes
12230
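# A C sketch of this tail path: the last 128 bytes of keystream are parked
# in the stack buffer bl, then exactly lensav (< 128) bytes are copied out
# one at a time by the bytes loop.
#
#     /* sketch variables: outp and lensav mirror the registers above */
#     unsigned char bl[128];                  /* filled from xmm8..xmm13 */
#     unsigned char *src = bl, *dst = outp;
#     while (lensav-- > 0)                    /* the ._bytes loop */
#         *dst++ = *src++;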
12231# qhasm: full:
12232._full:
12233
12234# qhasm: tmp = *(uint32 *)(np + 12)
12235# asm 1: movl   12(<np=int64#3),>tmp=int64#4d
12236# asm 2: movl   12(<np=%rdx),>tmp=%ecx
12237movl   12(%rdx),%ecx
12238
12239# qhasm: (uint32) bswap tmp
12240# asm 1: bswap <tmp=int64#4d
12241# asm 2: bswap <tmp=%ecx
12242bswap %ecx
12243
12244# qhasm: tmp += len
12245# asm 1: add  <len=int64#2,<tmp=int64#4
12246# asm 2: add  <len=%rsi,<tmp=%rcx
12247add  %rsi,%rcx
12248
12249# qhasm: (uint32) bswap tmp
12250# asm 1: bswap <tmp=int64#4d
12251# asm 2: bswap <tmp=%ecx
12252bswap %ecx
12253
12254# qhasm: *(uint32 *)(np + 12) = tmp
12255# asm 1: movl   <tmp=int64#4d,12(<np=int64#3)
12256# asm 2: movl   <tmp=%ecx,12(<np=%rdx)
12257movl   %ecx,12(%rdx)
12258
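# Note: unlike the main loop, which advances the counter by 8 blocks, this
# exit path adds the remaining byte count (128). The write only reaches the
# stack copy of the nonce, which is discarded on return, so the discrepancy
# does not affect the caller.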
12259# qhasm: *(int128 *) (outp + 0) = xmm8
12260# asm 1: movdqa <xmm8=int6464#9,0(<outp=int64#1)
12261# asm 2: movdqa <xmm8=%xmm8,0(<outp=%rdi)
12262movdqa %xmm8,0(%rdi)
12263
12264# qhasm: *(int128 *) (outp + 16) = xmm9
12265# asm 1: movdqa <xmm9=int6464#10,16(<outp=int64#1)
12266# asm 2: movdqa <xmm9=%xmm9,16(<outp=%rdi)
12267movdqa %xmm9,16(%rdi)
12268
12269# qhasm: *(int128 *) (outp + 32) = xmm12
12270# asm 1: movdqa <xmm12=int6464#13,32(<outp=int64#1)
12271# asm 2: movdqa <xmm12=%xmm12,32(<outp=%rdi)
12272movdqa %xmm12,32(%rdi)
12273
12274# qhasm: *(int128 *) (outp + 48) = xmm14
12275# asm 1: movdqa <xmm14=int6464#15,48(<outp=int64#1)
12276# asm 2: movdqa <xmm14=%xmm14,48(<outp=%rdi)
12277movdqa %xmm14,48(%rdi)
12278
12279# qhasm: *(int128 *) (outp + 64) = xmm11
12280# asm 1: movdqa <xmm11=int6464#12,64(<outp=int64#1)
12281# asm 2: movdqa <xmm11=%xmm11,64(<outp=%rdi)
12282movdqa %xmm11,64(%rdi)
12283
12284# qhasm: *(int128 *) (outp + 80) = xmm15
12285# asm 1: movdqa <xmm15=int6464#16,80(<outp=int64#1)
12286# asm 2: movdqa <xmm15=%xmm15,80(<outp=%rdi)
12287movdqa %xmm15,80(%rdi)
12288
12289# qhasm: *(int128 *) (outp + 96) = xmm10
12290# asm 1: movdqa <xmm10=int6464#11,96(<outp=int64#1)
12291# asm 2: movdqa <xmm10=%xmm10,96(<outp=%rdi)
12292movdqa %xmm10,96(%rdi)
12293
12294# qhasm: *(int128 *) (outp + 112) = xmm13
12295# asm 1: movdqa <xmm13=int6464#14,112(<outp=int64#1)
12296# asm 2: movdqa <xmm13=%xmm13,112(<outp=%rdi)
12297movdqa %xmm13,112(%rdi)
12298# comment:fp stack unchanged by fallthrough
12299
12300# qhasm: end:
12301._end:
12302
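# Epilogue: undo the stack-alignment adjustment kept in r11 and return 0,
# the usual crypto_stream success code. The two movs are generic qhasm
# return-value scaffolding; rax is cleared by the xor immediately after.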
12303# qhasm: leave
12304add %r11,%rsp
12305mov %rdi,%rax
12306mov %rsi,%rdx
12307xor %rax,%rax
12308ret
12309
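# A hypothetical caller sketch in C. The parameter order matches the qhasm
# input declarations (outp, len, np, c); the exact prototype is assumed to
# follow the usual afternm convention, with c pointing at the bitsliced
# expanded key prepared by the matching *_beforenm routine:
#
#     extern int crypto_stream_aes128ctr_core2_afternm(
#         unsigned char *outp, unsigned long long len,
#         const unsigned char *np, const unsigned char *c);
#
#     unsigned char ks[256];
#     crypto_stream_aes128ctr_core2_afternm(ks, sizeof ks, nonce, expanded_key);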