Diffstat (limited to 'nacl/crypto_stream/aes128ctr/core2/xor_afternm.s')
-rw-r--r--  nacl/crypto_stream/aes128ctr/core2/xor_afternm.s  12407
1 file changed, 12407 insertions(+), 0 deletions(-)
diff --git a/nacl/crypto_stream/aes128ctr/core2/xor_afternm.s b/nacl/crypto_stream/aes128ctr/core2/xor_afternm.s
new file mode 100644
index 00000000..022691a2
--- /dev/null
+++ b/nacl/crypto_stream/aes128ctr/core2/xor_afternm.s
@@ -0,0 +1,12407 @@
# Author: Emilia Käsper and Peter Schwabe
# Date: 2009-03-19
# +2010.01.31: minor namespace modifications
# Public domain

.data
.p2align 6

RCON: .int 0x00000000, 0x00000000, 0x00000000, 0xffffffff
ROTB: .int 0x0c000000, 0x00000000, 0x04000000, 0x08000000
EXPB0: .int 0x03030303, 0x07070707, 0x0b0b0b0b, 0x0f0f0f0f
CTRINC1: .int 0x00000001, 0x00000000, 0x00000000, 0x00000000
CTRINC2: .int 0x00000002, 0x00000000, 0x00000000, 0x00000000
CTRINC3: .int 0x00000003, 0x00000000, 0x00000000, 0x00000000
CTRINC4: .int 0x00000004, 0x00000000, 0x00000000, 0x00000000
CTRINC5: .int 0x00000005, 0x00000000, 0x00000000, 0x00000000
CTRINC6: .int 0x00000006, 0x00000000, 0x00000000, 0x00000000
CTRINC7: .int 0x00000007, 0x00000000, 0x00000000, 0x00000000
RCTRINC1: .int 0x00000000, 0x00000000, 0x00000000, 0x00000001
RCTRINC2: .int 0x00000000, 0x00000000, 0x00000000, 0x00000002
RCTRINC3: .int 0x00000000, 0x00000000, 0x00000000, 0x00000003
RCTRINC4: .int 0x00000000, 0x00000000, 0x00000000, 0x00000004
RCTRINC5: .int 0x00000000, 0x00000000, 0x00000000, 0x00000005
RCTRINC6: .int 0x00000000, 0x00000000, 0x00000000, 0x00000006
RCTRINC7: .int 0x00000000, 0x00000000, 0x00000000, 0x00000007

SWAP32: .int 0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f
M0SWAP: .quad 0x0105090d0004080c, 0x03070b0f02060a0e

BS0: .quad 0x5555555555555555, 0x5555555555555555
BS1: .quad 0x3333333333333333, 0x3333333333333333
BS2: .quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f
ONE: .quad 0xffffffffffffffff, 0xffffffffffffffff
M0: .quad 0x02060a0e03070b0f, 0x0004080c0105090d
SRM0: .quad 0x0304090e00050a0f, 0x01060b0c0207080d
SR: .quad 0x0504070600030201, 0x0f0e0d0c0a09080b

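# The RCTRINC tables above drive the counter setup below: the nonce block
# ends in a 32-bit big-endian counter (an assumption here; the standard
# AES-CTR layout), so the code byte-reverses each dword with SWAP32 and a
# plain paddd then adds 1..7 in the fourth dword lane.  A minimal C sketch
# of the same increment on one 16-byte block:
#
#     #include <stdint.h>
#     static void ctr_block_add(uint8_t b[16], uint32_t inc) {
#         uint32_t c = (uint32_t)b[12] << 24 | (uint32_t)b[13] << 16
#                    | (uint32_t)b[14] << 8  | (uint32_t)b[15];
#         c += inc;                          /* wraps mod 2^32, like paddd */
#         b[12] = c >> 24; b[13] = c >> 16; b[14] = c >> 8; b[15] = c;
#     }
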
# qhasm: int64 outp

# qhasm: int64 inp

# qhasm: int64 len

# qhasm: int64 np

# qhasm: int64 c

# qhasm: input outp

# qhasm: input inp

# qhasm: input len

# qhasm: input np

# qhasm: input c

# qhasm: int64 lensav

# qhasm: int64 tmp

# qhasm: int6464 xmm0

# qhasm: int6464 xmm1

# qhasm: int6464 xmm2

# qhasm: int6464 xmm3

# qhasm: int6464 xmm4

# qhasm: int6464 xmm5

# qhasm: int6464 xmm6

# qhasm: int6464 xmm7

# qhasm: int6464 xmm8

# qhasm: int6464 xmm9

# qhasm: int6464 xmm10

# qhasm: int6464 xmm11

# qhasm: int6464 xmm12

# qhasm: int6464 xmm13

# qhasm: int6464 xmm14

# qhasm: int6464 xmm15

# qhasm: int6464 t

# qhasm: stack1024 bl

# qhasm: stack128 nonce_stack

# qhasm: int64 blp

# qhasm: int64 b

# qhasm: enter crypto_stream_aes128ctr_core2_xor_afternm
.text
.p2align 5
.globl _crypto_stream_aes128ctr_core2_xor_afternm
.globl crypto_stream_aes128ctr_core2_xor_afternm
_crypto_stream_aes128ctr_core2_xor_afternm:
crypto_stream_aes128ctr_core2_xor_afternm:
mov %rsp,%r11
and $31,%r11
add $160,%r11
sub %r11,%rsp

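# A sketch of what this prologue computes: subtracting (rsp & 31) + 160
# leaves rsp at (old_rsp & ~31) - 160, i.e. at least 160 bytes of scratch
# below a 32-byte-aligned stack pointer, so the movdqa to 0(%rsp) below is
# an aligned store.
#
#     #include <stdint.h>
#     static uintptr_t adjust_rsp(uintptr_t rsp) {
#         uintptr_t r11 = (rsp & 31) + 160;   /* misalignment + scratch */
#         return rsp - r11;                   /* still aligned: 160 = 5*32 */
#     }
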
# qhasm: xmm0 = *(int128 *) (np + 0)
# asm 1: movdqa 0(<np=int64#4),>xmm0=int6464#1
# asm 2: movdqa 0(<np=%rcx),>xmm0=%xmm0
movdqa 0(%rcx),%xmm0

# qhasm: nonce_stack = xmm0
# asm 1: movdqa <xmm0=int6464#1,>nonce_stack=stack128#1
# asm 2: movdqa <xmm0=%xmm0,>nonce_stack=0(%rsp)
movdqa %xmm0,0(%rsp)

# qhasm: np = &nonce_stack
# asm 1: leaq <nonce_stack=stack128#1,>np=int64#4
# asm 2: leaq <nonce_stack=0(%rsp),>np=%rcx
leaq 0(%rsp),%rcx

# qhasm: enc_block:
._enc_block:

# qhasm: xmm0 = *(int128 *) (np + 0)
# asm 1: movdqa 0(<np=int64#4),>xmm0=int6464#1
# asm 2: movdqa 0(<np=%rcx),>xmm0=%xmm0
movdqa 0(%rcx),%xmm0

# qhasm: xmm1 = xmm0
# asm 1: movdqa <xmm0=int6464#1,>xmm1=int6464#2
# asm 2: movdqa <xmm0=%xmm0,>xmm1=%xmm1
movdqa %xmm0,%xmm1

# qhasm: shuffle bytes of xmm1 by SWAP32
# asm 1: pshufb SWAP32,<xmm1=int6464#2
# asm 2: pshufb SWAP32,<xmm1=%xmm1
pshufb SWAP32,%xmm1

# qhasm: xmm2 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm2=int6464#3
# asm 2: movdqa <xmm1=%xmm1,>xmm2=%xmm2
movdqa %xmm1,%xmm2

# qhasm: xmm3 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm3=int6464#4
# asm 2: movdqa <xmm1=%xmm1,>xmm3=%xmm3
movdqa %xmm1,%xmm3

# qhasm: xmm4 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm4=int6464#5
# asm 2: movdqa <xmm1=%xmm1,>xmm4=%xmm4
movdqa %xmm1,%xmm4

# qhasm: xmm5 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm5=int6464#6
# asm 2: movdqa <xmm1=%xmm1,>xmm5=%xmm5
movdqa %xmm1,%xmm5

# qhasm: xmm6 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm6=int6464#7
# asm 2: movdqa <xmm1=%xmm1,>xmm6=%xmm6
movdqa %xmm1,%xmm6

# qhasm: xmm7 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm7=int6464#8
# asm 2: movdqa <xmm1=%xmm1,>xmm7=%xmm7
movdqa %xmm1,%xmm7

# qhasm: int32323232 xmm1 += RCTRINC1
# asm 1: paddd RCTRINC1,<xmm1=int6464#2
# asm 2: paddd RCTRINC1,<xmm1=%xmm1
paddd RCTRINC1,%xmm1

# qhasm: int32323232 xmm2 += RCTRINC2
# asm 1: paddd RCTRINC2,<xmm2=int6464#3
# asm 2: paddd RCTRINC2,<xmm2=%xmm2
paddd RCTRINC2,%xmm2

# qhasm: int32323232 xmm3 += RCTRINC3
# asm 1: paddd RCTRINC3,<xmm3=int6464#4
# asm 2: paddd RCTRINC3,<xmm3=%xmm3
paddd RCTRINC3,%xmm3

# qhasm: int32323232 xmm4 += RCTRINC4
# asm 1: paddd RCTRINC4,<xmm4=int6464#5
# asm 2: paddd RCTRINC4,<xmm4=%xmm4
paddd RCTRINC4,%xmm4

# qhasm: int32323232 xmm5 += RCTRINC5
# asm 1: paddd RCTRINC5,<xmm5=int6464#6
# asm 2: paddd RCTRINC5,<xmm5=%xmm5
paddd RCTRINC5,%xmm5

# qhasm: int32323232 xmm6 += RCTRINC6
# asm 1: paddd RCTRINC6,<xmm6=int6464#7
# asm 2: paddd RCTRINC6,<xmm6=%xmm6
paddd RCTRINC6,%xmm6

# qhasm: int32323232 xmm7 += RCTRINC7
# asm 1: paddd RCTRINC7,<xmm7=int6464#8
# asm 2: paddd RCTRINC7,<xmm7=%xmm7
paddd RCTRINC7,%xmm7

# qhasm: shuffle bytes of xmm0 by M0
# asm 1: pshufb M0,<xmm0=int6464#1
# asm 2: pshufb M0,<xmm0=%xmm0
pshufb M0,%xmm0

# qhasm: shuffle bytes of xmm1 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm1=int6464#2
# asm 2: pshufb M0SWAP,<xmm1=%xmm1
pshufb M0SWAP,%xmm1

# qhasm: shuffle bytes of xmm2 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm2=int6464#3
# asm 2: pshufb M0SWAP,<xmm2=%xmm2
pshufb M0SWAP,%xmm2

# qhasm: shuffle bytes of xmm3 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm3=int6464#4
# asm 2: pshufb M0SWAP,<xmm3=%xmm3
pshufb M0SWAP,%xmm3

# qhasm: shuffle bytes of xmm4 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm4=int6464#5
# asm 2: pshufb M0SWAP,<xmm4=%xmm4
pshufb M0SWAP,%xmm4

# qhasm: shuffle bytes of xmm5 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm5=int6464#6
# asm 2: pshufb M0SWAP,<xmm5=%xmm5
pshufb M0SWAP,%xmm5

# qhasm: shuffle bytes of xmm6 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm6=int6464#7
# asm 2: pshufb M0SWAP,<xmm6=%xmm6
pshufb M0SWAP,%xmm6

# qhasm: shuffle bytes of xmm7 by M0SWAP
# asm 1: pshufb M0SWAP,<xmm7=int6464#8
# asm 2: pshufb M0SWAP,<xmm7=%xmm7
pshufb M0SWAP,%xmm7

# qhasm: xmm8 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm8=int6464#9
# asm 2: movdqa <xmm6=%xmm6,>xmm8=%xmm8
movdqa %xmm6,%xmm8

# qhasm: uint6464 xmm8 >>= 1
# asm 1: psrlq $1,<xmm8=int6464#9
# asm 2: psrlq $1,<xmm8=%xmm8
psrlq $1,%xmm8

# qhasm: xmm8 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9
# asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8
pxor %xmm7,%xmm8

# qhasm: xmm8 &= BS0
# asm 1: pand BS0,<xmm8=int6464#9
# asm 2: pand BS0,<xmm8=%xmm8
pand BS0,%xmm8

# qhasm: xmm7 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8
# asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7
pxor %xmm8,%xmm7

# qhasm: uint6464 xmm8 <<= 1
# asm 1: psllq $1,<xmm8=int6464#9
# asm 2: psllq $1,<xmm8=%xmm8
psllq $1,%xmm8

# qhasm: xmm6 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7
# asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6
pxor %xmm8,%xmm6

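# The seven instructions above are one "swapmove" step, the standard
# bitslicing primitive: the BS0-selected bits of xmm7 are exchanged with
# the corresponding bits of xmm6 shifted by one.  A C sketch of the
# psrlq/pxor/pand/pxor/psllq/pxor pattern, one 64-bit lane shown:
#
#     #include <stdint.h>
#     static void swapmove(uint64_t *a, uint64_t *b,
#                          unsigned shift, uint64_t mask) {
#         uint64_t t = ((*a >> shift) ^ *b) & mask;  /* psrlq, pxor, pand */
#         *b ^= t;                                   /* pxor */
#         *a ^= t << shift;                          /* psllq, pxor */
#     }
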
# qhasm: xmm8 = xmm4
# asm 1: movdqa <xmm4=int6464#5,>xmm8=int6464#9
# asm 2: movdqa <xmm4=%xmm4,>xmm8=%xmm8
movdqa %xmm4,%xmm8

# qhasm: uint6464 xmm8 >>= 1
# asm 1: psrlq $1,<xmm8=int6464#9
# asm 2: psrlq $1,<xmm8=%xmm8
psrlq $1,%xmm8

# qhasm: xmm8 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
pxor %xmm5,%xmm8

# qhasm: xmm8 &= BS0
# asm 1: pand BS0,<xmm8=int6464#9
# asm 2: pand BS0,<xmm8=%xmm8
pand BS0,%xmm8

# qhasm: xmm5 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6
# asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5
pxor %xmm8,%xmm5

# qhasm: uint6464 xmm8 <<= 1
# asm 1: psllq $1,<xmm8=int6464#9
# asm 2: psllq $1,<xmm8=%xmm8
psllq $1,%xmm8

# qhasm: xmm4 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
pxor %xmm8,%xmm4

# qhasm: xmm8 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm8=int6464#9
# asm 2: movdqa <xmm2=%xmm2,>xmm8=%xmm8
movdqa %xmm2,%xmm8

# qhasm: uint6464 xmm8 >>= 1
# asm 1: psrlq $1,<xmm8=int6464#9
# asm 2: psrlq $1,<xmm8=%xmm8
psrlq $1,%xmm8

# qhasm: xmm8 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#9
# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm8
pxor %xmm3,%xmm8

# qhasm: xmm8 &= BS0
# asm 1: pand BS0,<xmm8=int6464#9
# asm 2: pand BS0,<xmm8=%xmm8
pand BS0,%xmm8

# qhasm: xmm3 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4
# asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3
pxor %xmm8,%xmm3

# qhasm: uint6464 xmm8 <<= 1
# asm 1: psllq $1,<xmm8=int6464#9
# asm 2: psllq $1,<xmm8=%xmm8
psllq $1,%xmm8

# qhasm: xmm2 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3
# asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2
pxor %xmm8,%xmm2

# qhasm: xmm8 = xmm0
# asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9
# asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8
movdqa %xmm0,%xmm8

# qhasm: uint6464 xmm8 >>= 1
# asm 1: psrlq $1,<xmm8=int6464#9
# asm 2: psrlq $1,<xmm8=%xmm8
psrlq $1,%xmm8

# qhasm: xmm8 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm8=int6464#9
# asm 2: pxor <xmm1=%xmm1,<xmm8=%xmm8
pxor %xmm1,%xmm8

# qhasm: xmm8 &= BS0
# asm 1: pand BS0,<xmm8=int6464#9
# asm 2: pand BS0,<xmm8=%xmm8
pand BS0,%xmm8

# qhasm: xmm1 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
pxor %xmm8,%xmm1

# qhasm: uint6464 xmm8 <<= 1
# asm 1: psllq $1,<xmm8=int6464#9
# asm 2: psllq $1,<xmm8=%xmm8
psllq $1,%xmm8

# qhasm: xmm0 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

# qhasm: xmm8 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#9
# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm8
movdqa %xmm5,%xmm8

# qhasm: uint6464 xmm8 >>= 2
# asm 1: psrlq $2,<xmm8=int6464#9
# asm 2: psrlq $2,<xmm8=%xmm8
psrlq $2,%xmm8

# qhasm: xmm8 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9
# asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8
pxor %xmm7,%xmm8

# qhasm: xmm8 &= BS1
# asm 1: pand BS1,<xmm8=int6464#9
# asm 2: pand BS1,<xmm8=%xmm8
pand BS1,%xmm8

# qhasm: xmm7 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8
# asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7
pxor %xmm8,%xmm7

# qhasm: uint6464 xmm8 <<= 2
# asm 1: psllq $2,<xmm8=int6464#9
# asm 2: psllq $2,<xmm8=%xmm8
psllq $2,%xmm8

# qhasm: xmm5 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6
# asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5
pxor %xmm8,%xmm5

# qhasm: xmm8 = xmm4
# asm 1: movdqa <xmm4=int6464#5,>xmm8=int6464#9
# asm 2: movdqa <xmm4=%xmm4,>xmm8=%xmm8
movdqa %xmm4,%xmm8

# qhasm: uint6464 xmm8 >>= 2
# asm 1: psrlq $2,<xmm8=int6464#9
# asm 2: psrlq $2,<xmm8=%xmm8
psrlq $2,%xmm8

# qhasm: xmm8 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#9
# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm8
pxor %xmm6,%xmm8

# qhasm: xmm8 &= BS1
# asm 1: pand BS1,<xmm8=int6464#9
# asm 2: pand BS1,<xmm8=%xmm8
pand BS1,%xmm8

# qhasm: xmm6 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7
# asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6
pxor %xmm8,%xmm6

# qhasm: uint6464 xmm8 <<= 2
# asm 1: psllq $2,<xmm8=int6464#9
# asm 2: psllq $2,<xmm8=%xmm8
psllq $2,%xmm8

# qhasm: xmm4 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
pxor %xmm8,%xmm4

# qhasm: xmm8 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#9
# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm8
movdqa %xmm1,%xmm8

# qhasm: uint6464 xmm8 >>= 2
# asm 1: psrlq $2,<xmm8=int6464#9
# asm 2: psrlq $2,<xmm8=%xmm8
psrlq $2,%xmm8

# qhasm: xmm8 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#9
# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm8
pxor %xmm3,%xmm8

# qhasm: xmm8 &= BS1
# asm 1: pand BS1,<xmm8=int6464#9
# asm 2: pand BS1,<xmm8=%xmm8
pand BS1,%xmm8

# qhasm: xmm3 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4
# asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3
pxor %xmm8,%xmm3

# qhasm: uint6464 xmm8 <<= 2
# asm 1: psllq $2,<xmm8=int6464#9
# asm 2: psllq $2,<xmm8=%xmm8
psllq $2,%xmm8

# qhasm: xmm1 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
pxor %xmm8,%xmm1

# qhasm: xmm8 = xmm0
# asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9
# asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8
movdqa %xmm0,%xmm8

# qhasm: uint6464 xmm8 >>= 2
# asm 1: psrlq $2,<xmm8=int6464#9
# asm 2: psrlq $2,<xmm8=%xmm8
psrlq $2,%xmm8

# qhasm: xmm8 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#9
# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm8
pxor %xmm2,%xmm8

# qhasm: xmm8 &= BS1
# asm 1: pand BS1,<xmm8=int6464#9
# asm 2: pand BS1,<xmm8=%xmm8
pand BS1,%xmm8

# qhasm: xmm2 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3
# asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2
pxor %xmm8,%xmm2

# qhasm: uint6464 xmm8 <<= 2
# asm 1: psllq $2,<xmm8=int6464#9
# asm 2: psllq $2,<xmm8=%xmm8
psllq $2,%xmm8

# qhasm: xmm0 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

# qhasm: xmm8 = xmm3
# asm 1: movdqa <xmm3=int6464#4,>xmm8=int6464#9
# asm 2: movdqa <xmm3=%xmm3,>xmm8=%xmm8
movdqa %xmm3,%xmm8

# qhasm: uint6464 xmm8 >>= 4
# asm 1: psrlq $4,<xmm8=int6464#9
# asm 2: psrlq $4,<xmm8=%xmm8
psrlq $4,%xmm8

# qhasm: xmm8 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm8=int6464#9
# asm 2: pxor <xmm7=%xmm7,<xmm8=%xmm8
pxor %xmm7,%xmm8

# qhasm: xmm8 &= BS2
# asm 1: pand BS2,<xmm8=int6464#9
# asm 2: pand BS2,<xmm8=%xmm8
pand BS2,%xmm8

# qhasm: xmm7 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm7=int6464#8
# asm 2: pxor <xmm8=%xmm8,<xmm7=%xmm7
pxor %xmm8,%xmm7

# qhasm: uint6464 xmm8 <<= 4
# asm 1: psllq $4,<xmm8=int6464#9
# asm 2: psllq $4,<xmm8=%xmm8
psllq $4,%xmm8

# qhasm: xmm3 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm3=int6464#4
# asm 2: pxor <xmm8=%xmm8,<xmm3=%xmm3
pxor %xmm8,%xmm3

# qhasm: xmm8 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm8=int6464#9
# asm 2: movdqa <xmm2=%xmm2,>xmm8=%xmm8
movdqa %xmm2,%xmm8

# qhasm: uint6464 xmm8 >>= 4
# asm 1: psrlq $4,<xmm8=int6464#9
# asm 2: psrlq $4,<xmm8=%xmm8
psrlq $4,%xmm8

# qhasm: xmm8 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm8=int6464#9
# asm 2: pxor <xmm6=%xmm6,<xmm8=%xmm8
pxor %xmm6,%xmm8

# qhasm: xmm8 &= BS2
# asm 1: pand BS2,<xmm8=int6464#9
# asm 2: pand BS2,<xmm8=%xmm8
pand BS2,%xmm8

# qhasm: xmm6 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm6=int6464#7
# asm 2: pxor <xmm8=%xmm8,<xmm6=%xmm6
pxor %xmm8,%xmm6

# qhasm: uint6464 xmm8 <<= 4
# asm 1: psllq $4,<xmm8=int6464#9
# asm 2: psllq $4,<xmm8=%xmm8
psllq $4,%xmm8

# qhasm: xmm2 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm2=int6464#3
# asm 2: pxor <xmm8=%xmm8,<xmm2=%xmm2
pxor %xmm8,%xmm2

# qhasm: xmm8 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#9
# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm8
movdqa %xmm1,%xmm8

# qhasm: uint6464 xmm8 >>= 4
# asm 1: psrlq $4,<xmm8=int6464#9
# asm 2: psrlq $4,<xmm8=%xmm8
psrlq $4,%xmm8

# qhasm: xmm8 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
pxor %xmm5,%xmm8

# qhasm: xmm8 &= BS2
# asm 1: pand BS2,<xmm8=int6464#9
# asm 2: pand BS2,<xmm8=%xmm8
pand BS2,%xmm8

# qhasm: xmm5 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm5=int6464#6
# asm 2: pxor <xmm8=%xmm8,<xmm5=%xmm5
pxor %xmm8,%xmm5

# qhasm: uint6464 xmm8 <<= 4
# asm 1: psllq $4,<xmm8=int6464#9
# asm 2: psllq $4,<xmm8=%xmm8
psllq $4,%xmm8

# qhasm: xmm1 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
pxor %xmm8,%xmm1

# qhasm: xmm8 = xmm0
# asm 1: movdqa <xmm0=int6464#1,>xmm8=int6464#9
# asm 2: movdqa <xmm0=%xmm0,>xmm8=%xmm8
movdqa %xmm0,%xmm8

# qhasm: uint6464 xmm8 >>= 4
# asm 1: psrlq $4,<xmm8=int6464#9
# asm 2: psrlq $4,<xmm8=%xmm8
psrlq $4,%xmm8

# qhasm: xmm8 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm8=int6464#9
# asm 2: pxor <xmm4=%xmm4,<xmm8=%xmm8
pxor %xmm4,%xmm8

# qhasm: xmm8 &= BS2
# asm 1: pand BS2,<xmm8=int6464#9
# asm 2: pand BS2,<xmm8=%xmm8
pand BS2,%xmm8

# qhasm: xmm4 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
pxor %xmm8,%xmm4

# qhasm: uint6464 xmm8 <<= 4
# asm 1: psllq $4,<xmm8=int6464#9
# asm 2: psllq $4,<xmm8=%xmm8
psllq $4,%xmm8

# qhasm: xmm0 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

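# Everything from the first psrlq $1 down to here is the bitslice
# transform: three swapmove levels with shift/mask pairs (1, BS0),
# (2, BS1) and (4, BS2) over register pairs at distance 1, 2 and 4.
# With the swapmove() helper sketched earlier, an equivalent loop
# (one 64-bit lane per register) would be:
#
#     static void bitslice8(uint64_t x[8]) {
#         static const uint64_t mask[3] = { 0x5555555555555555ULL,   /* BS0 */
#                                           0x3333333333333333ULL,   /* BS1 */
#                                           0x0f0f0f0f0f0f0f0fULL }; /* BS2 */
#         for (unsigned l = 0; l < 3; l++) {
#             unsigned s = 1u << l;
#             for (unsigned b = 0; b < 8; b++)
#                 if (b & s)                    /* pairs (b-s, b) */
#                     swapmove(&x[b - s], &x[b], s, mask[l]);
#         }
#     }
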
# qhasm: xmm0 ^= *(int128 *)(c + 0)
# asm 1: pxor 0(<c=int64#5),<xmm0=int6464#1
# asm 2: pxor 0(<c=%r8),<xmm0=%xmm0
pxor 0(%r8),%xmm0

# qhasm: shuffle bytes of xmm0 by SR
# asm 1: pshufb SR,<xmm0=int6464#1
# asm 2: pshufb SR,<xmm0=%xmm0
pshufb SR,%xmm0

# qhasm: xmm1 ^= *(int128 *)(c + 16)
# asm 1: pxor 16(<c=int64#5),<xmm1=int6464#2
# asm 2: pxor 16(<c=%r8),<xmm1=%xmm1
pxor 16(%r8),%xmm1

# qhasm: shuffle bytes of xmm1 by SR
# asm 1: pshufb SR,<xmm1=int6464#2
# asm 2: pshufb SR,<xmm1=%xmm1
pshufb SR,%xmm1

# qhasm: xmm2 ^= *(int128 *)(c + 32)
# asm 1: pxor 32(<c=int64#5),<xmm2=int6464#3
# asm 2: pxor 32(<c=%r8),<xmm2=%xmm2
pxor 32(%r8),%xmm2

# qhasm: shuffle bytes of xmm2 by SR
# asm 1: pshufb SR,<xmm2=int6464#3
# asm 2: pshufb SR,<xmm2=%xmm2
pshufb SR,%xmm2

# qhasm: xmm3 ^= *(int128 *)(c + 48)
# asm 1: pxor 48(<c=int64#5),<xmm3=int6464#4
# asm 2: pxor 48(<c=%r8),<xmm3=%xmm3
pxor 48(%r8),%xmm3

# qhasm: shuffle bytes of xmm3 by SR
# asm 1: pshufb SR,<xmm3=int6464#4
# asm 2: pshufb SR,<xmm3=%xmm3
pshufb SR,%xmm3

# qhasm: xmm4 ^= *(int128 *)(c + 64)
# asm 1: pxor 64(<c=int64#5),<xmm4=int6464#5
# asm 2: pxor 64(<c=%r8),<xmm4=%xmm4
pxor 64(%r8),%xmm4

# qhasm: shuffle bytes of xmm4 by SR
# asm 1: pshufb SR,<xmm4=int6464#5
# asm 2: pshufb SR,<xmm4=%xmm4
pshufb SR,%xmm4

# qhasm: xmm5 ^= *(int128 *)(c + 80)
# asm 1: pxor 80(<c=int64#5),<xmm5=int6464#6
# asm 2: pxor 80(<c=%r8),<xmm5=%xmm5
pxor 80(%r8),%xmm5

# qhasm: shuffle bytes of xmm5 by SR
# asm 1: pshufb SR,<xmm5=int6464#6
# asm 2: pshufb SR,<xmm5=%xmm5
pshufb SR,%xmm5

# qhasm: xmm6 ^= *(int128 *)(c + 96)
# asm 1: pxor 96(<c=int64#5),<xmm6=int6464#7
# asm 2: pxor 96(<c=%r8),<xmm6=%xmm6
pxor 96(%r8),%xmm6

# qhasm: shuffle bytes of xmm6 by SR
# asm 1: pshufb SR,<xmm6=int6464#7
# asm 2: pshufb SR,<xmm6=%xmm6
pshufb SR,%xmm6

# qhasm: xmm7 ^= *(int128 *)(c + 112)
# asm 1: pxor 112(<c=int64#5),<xmm7=int6464#8
# asm 2: pxor 112(<c=%r8),<xmm7=%xmm7
pxor 112(%r8),%xmm7

# qhasm: shuffle bytes of xmm7 by SR
# asm 1: pshufb SR,<xmm7=int6464#8
# asm 2: pshufb SR,<xmm7=%xmm7
pshufb SR,%xmm7

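# Each state register was just XORed with 16 bytes of the expanded key at
# c (AddRoundKey) and byte-permuted by SR, which in this bitsliced layout
# performs ShiftRows.  For reference, a C model of pshufb as used here
# (SR's index bytes never set the high bit, so it is a pure permutation):
#
#     #include <stdint.h>
#     static void pshufb16(uint8_t x[16], const uint8_t idx[16]) {
#         uint8_t old[16];
#         for (int i = 0; i < 16; i++) old[i] = x[i];
#         for (int i = 0; i < 16; i++)
#             x[i] = (idx[i] & 0x80) ? 0 : old[idx[i] & 15];
#     }
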
# qhasm: xmm5 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
pxor %xmm6,%xmm5

# qhasm: xmm2 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
pxor %xmm1,%xmm2

# qhasm: xmm5 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
pxor %xmm0,%xmm5

# qhasm: xmm6 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
pxor %xmm2,%xmm6

# qhasm: xmm3 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
pxor %xmm0,%xmm3

# qhasm: xmm6 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
pxor %xmm3,%xmm6

# qhasm: xmm3 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
pxor %xmm7,%xmm3

# qhasm: xmm3 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
pxor %xmm4,%xmm3

# qhasm: xmm7 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
pxor %xmm5,%xmm7

# qhasm: xmm3 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
pxor %xmm1,%xmm3

# qhasm: xmm4 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
pxor %xmm5,%xmm4

# qhasm: xmm2 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
pxor %xmm7,%xmm2

# qhasm: xmm1 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
pxor %xmm5,%xmm1

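# From here to the end of the round the code evaluates the bitsliced AES
# S-box as a Boolean circuit: one logical instruction per gate, applied to
# all 128 S-box inputs of the eight blocks at once.  The thirteen pxor
# instructions just executed are the circuit's input linear layer;
# transcribed to C on slices s[0..7] (one 64-bit lane each):
#
#     static void sbox_in_linear(uint64_t s[8]) {
#         s[5] ^= s[6];  s[2] ^= s[1];  s[5] ^= s[0];
#         s[6] ^= s[2];  s[3] ^= s[0];  s[6] ^= s[3];
#         s[3] ^= s[7];  s[3] ^= s[4];  s[7] ^= s[5];
#         s[3] ^= s[1];  s[4] ^= s[5];  s[2] ^= s[7];
#         s[1] ^= s[5];
#     }
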
# qhasm: xmm11 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
movdqa %xmm7,%xmm8

# qhasm: xmm10 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
movdqa %xmm1,%xmm9

# qhasm: xmm9 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
movdqa %xmm5,%xmm10

# qhasm: xmm13 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
movdqa %xmm2,%xmm11

# qhasm: xmm12 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
movdqa %xmm6,%xmm12

# qhasm: xmm11 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
pxor %xmm4,%xmm8

# qhasm: xmm10 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
pxor %xmm2,%xmm9

# qhasm: xmm9 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
pxor %xmm3,%xmm10

# qhasm: xmm13 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
pxor %xmm4,%xmm11

# qhasm: xmm12 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
pxor %xmm0,%xmm12

# qhasm: xmm14 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
movdqa %xmm8,%xmm13

# qhasm: xmm8 = xmm10
# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
movdqa %xmm9,%xmm14

# qhasm: xmm15 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
movdqa %xmm8,%xmm15

# qhasm: xmm10 |= xmm9
# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
por %xmm10,%xmm9

# qhasm: xmm11 |= xmm12
# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
por %xmm12,%xmm8

# qhasm: xmm15 ^= xmm8
# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
pxor %xmm14,%xmm15

# qhasm: xmm14 &= xmm12
# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
pand %xmm12,%xmm13

# qhasm: xmm8 &= xmm9
# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
pand %xmm10,%xmm14

# qhasm: xmm12 ^= xmm9
# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
pxor %xmm10,%xmm12

# qhasm: xmm15 &= xmm12
# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
pand %xmm12,%xmm15

# qhasm: xmm12 = xmm3
# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
movdqa %xmm3,%xmm10

# qhasm: xmm12 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
pxor %xmm0,%xmm10

# qhasm: xmm13 &= xmm12
# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
pand %xmm10,%xmm11

# qhasm: xmm11 ^= xmm13
# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
pxor %xmm11,%xmm8

# qhasm: xmm10 ^= xmm13
# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
pxor %xmm11,%xmm9

# qhasm: xmm13 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
movdqa %xmm7,%xmm10

# qhasm: xmm13 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
pxor %xmm1,%xmm10

# qhasm: xmm12 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
movdqa %xmm5,%xmm11

# qhasm: xmm9 = xmm13
# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
movdqa %xmm10,%xmm12

# qhasm: xmm12 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
pxor %xmm6,%xmm11

# qhasm: xmm9 |= xmm12
# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
por %xmm11,%xmm12

# qhasm: xmm13 &= xmm12
# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
pand %xmm11,%xmm10

# qhasm: xmm8 ^= xmm13
# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
pxor %xmm10,%xmm14

# qhasm: xmm11 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
pxor %xmm15,%xmm8

# qhasm: xmm10 ^= xmm14
# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
pxor %xmm13,%xmm9

# qhasm: xmm9 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
pxor %xmm15,%xmm12

# qhasm: xmm8 ^= xmm14
# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
pxor %xmm13,%xmm14

# qhasm: xmm9 ^= xmm14
# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
pxor %xmm13,%xmm12

# qhasm: xmm12 = xmm2
# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
movdqa %xmm2,%xmm10

# qhasm: xmm13 = xmm4
# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
movdqa %xmm4,%xmm11

# qhasm: xmm14 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
movdqa %xmm1,%xmm13

# qhasm: xmm15 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
movdqa %xmm7,%xmm15

# qhasm: xmm12 &= xmm3
# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
pand %xmm3,%xmm10

# qhasm: xmm13 &= xmm0
# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
pand %xmm0,%xmm11

# qhasm: xmm14 &= xmm5
# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
pand %xmm5,%xmm13

# qhasm: xmm15 |= xmm6
# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
por %xmm6,%xmm15

# qhasm: xmm11 ^= xmm12
# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
pxor %xmm10,%xmm8

# qhasm: xmm10 ^= xmm13
# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
pxor %xmm11,%xmm9

# qhasm: xmm9 ^= xmm14
# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
pxor %xmm13,%xmm12

# qhasm: xmm8 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
pxor %xmm15,%xmm14

# qhasm: xmm12 = xmm11
# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
movdqa %xmm8,%xmm10

# qhasm: xmm12 ^= xmm10
# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
pxor %xmm9,%xmm10

# qhasm: xmm11 &= xmm9
# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
pand %xmm12,%xmm8

# qhasm: xmm14 = xmm8
# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
movdqa %xmm14,%xmm11

# qhasm: xmm14 ^= xmm11
# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
pxor %xmm8,%xmm11

# qhasm: xmm15 = xmm12
# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
movdqa %xmm10,%xmm13

# qhasm: xmm15 &= xmm14
# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
pand %xmm11,%xmm13

# qhasm: xmm15 ^= xmm10
# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
pxor %xmm9,%xmm13

# qhasm: xmm13 = xmm9
# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
movdqa %xmm12,%xmm15

# qhasm: xmm13 ^= xmm8
# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
pxor %xmm14,%xmm15

# qhasm: xmm11 ^= xmm10
# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
pxor %xmm9,%xmm8

# qhasm: xmm13 &= xmm11
# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
pand %xmm8,%xmm15

# qhasm: xmm13 ^= xmm8
# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
pxor %xmm14,%xmm15

# qhasm: xmm9 ^= xmm13
# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
pxor %xmm15,%xmm12

# qhasm: xmm10 = xmm14
# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
movdqa %xmm11,%xmm8

# qhasm: xmm10 ^= xmm13
# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
pxor %xmm15,%xmm8

# qhasm: xmm10 &= xmm8
# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
pand %xmm14,%xmm8

# qhasm: xmm9 ^= xmm10
# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
pxor %xmm8,%xmm12

# qhasm: xmm14 ^= xmm10
# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
pxor %xmm8,%xmm11

# qhasm: xmm14 &= xmm15
# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
pand %xmm13,%xmm11

# qhasm: xmm14 ^= xmm12
# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
pxor %xmm10,%xmm11

# qhasm: xmm12 = xmm6
# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
movdqa %xmm6,%xmm8

# qhasm: xmm8 = xmm5
# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
movdqa %xmm5,%xmm9

# qhasm: xmm10 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
movdqa %xmm13,%xmm10

# qhasm: xmm10 ^= xmm14
# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
pxor %xmm11,%xmm10

# qhasm: xmm10 &= xmm6
# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
pand %xmm6,%xmm10

# qhasm: xmm6 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
pxor %xmm5,%xmm6

# qhasm: xmm6 &= xmm14
# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
pand %xmm11,%xmm6

# qhasm: xmm5 &= xmm15
# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
pand %xmm13,%xmm5

# qhasm: xmm6 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
pxor %xmm5,%xmm6

# qhasm: xmm5 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
pxor %xmm10,%xmm5

# qhasm: xmm12 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
pxor %xmm0,%xmm8

# qhasm: xmm8 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
pxor %xmm3,%xmm9

# qhasm: xmm15 ^= xmm13
# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
pxor %xmm15,%xmm13

# qhasm: xmm14 ^= xmm9
# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
pxor %xmm12,%xmm11

# qhasm: xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm: xmm11 ^= xmm14
# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
pxor %xmm11,%xmm10

# qhasm: xmm11 &= xmm12
# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
pand %xmm8,%xmm10

# qhasm: xmm12 ^= xmm8
# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
pxor %xmm9,%xmm8

# qhasm: xmm12 &= xmm14
# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
pand %xmm11,%xmm8

# qhasm: xmm8 &= xmm15
# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
pand %xmm13,%xmm9

# qhasm: xmm8 ^= xmm12
# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
pxor %xmm8,%xmm9

# qhasm: xmm12 ^= xmm11
# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
pxor %xmm10,%xmm8

# qhasm: xmm10 = xmm13
# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
movdqa %xmm15,%xmm10

# qhasm: xmm10 ^= xmm9
# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
pxor %xmm12,%xmm10

# qhasm: xmm10 &= xmm0
# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
pand %xmm0,%xmm10

# qhasm: xmm0 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
pxor %xmm3,%xmm0

# qhasm: xmm0 &= xmm9
# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
pand %xmm12,%xmm0

# qhasm: xmm3 &= xmm13
# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
pand %xmm15,%xmm3

# qhasm: xmm0 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
pxor %xmm3,%xmm0

# qhasm: xmm3 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
pxor %xmm10,%xmm3

# qhasm: xmm6 ^= xmm12
# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
pxor %xmm8,%xmm6

# qhasm: xmm0 ^= xmm12
# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

# qhasm: xmm5 ^= xmm8
# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
pxor %xmm9,%xmm5

# qhasm: xmm3 ^= xmm8
# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
pxor %xmm9,%xmm3

# qhasm: xmm12 = xmm7
# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
movdqa %xmm7,%xmm8

# qhasm: xmm8 = xmm1
# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
movdqa %xmm1,%xmm9

# qhasm: xmm12 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
pxor %xmm4,%xmm8

# qhasm: xmm8 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
pxor %xmm2,%xmm9

# qhasm: xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm: xmm11 ^= xmm14
# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
pxor %xmm11,%xmm10

# qhasm: xmm11 &= xmm12
# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
pand %xmm8,%xmm10

# qhasm: xmm12 ^= xmm8
# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
pxor %xmm9,%xmm8

# qhasm: xmm12 &= xmm14
# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
pand %xmm11,%xmm8

# qhasm: xmm8 &= xmm15
# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
pand %xmm13,%xmm9

# qhasm: xmm8 ^= xmm12
# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
pxor %xmm8,%xmm9

# qhasm: xmm12 ^= xmm11
# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
pxor %xmm10,%xmm8

# qhasm: xmm10 = xmm13
# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
movdqa %xmm15,%xmm10

# qhasm: xmm10 ^= xmm9
# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
pxor %xmm12,%xmm10

# qhasm: xmm10 &= xmm4
# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
pand %xmm4,%xmm10

# qhasm: xmm4 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
pxor %xmm2,%xmm4

# qhasm: xmm4 &= xmm9
# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
pand %xmm12,%xmm4

# qhasm: xmm2 &= xmm13
# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
pand %xmm15,%xmm2

# qhasm: xmm4 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
pxor %xmm2,%xmm4

# qhasm: xmm2 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
pxor %xmm10,%xmm2

# qhasm: xmm15 ^= xmm13
# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
pxor %xmm15,%xmm13

# qhasm: xmm14 ^= xmm9
# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
pxor %xmm12,%xmm11

# qhasm: xmm11 = xmm15
# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
movdqa %xmm13,%xmm10

# qhasm: xmm11 ^= xmm14
# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
pxor %xmm11,%xmm10

# qhasm: xmm11 &= xmm7
# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
pand %xmm7,%xmm10

# qhasm: xmm7 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
pxor %xmm1,%xmm7

# qhasm: xmm7 &= xmm14
# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
pand %xmm11,%xmm7

# qhasm: xmm1 &= xmm15
# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
pand %xmm13,%xmm1

# qhasm: xmm7 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
pxor %xmm1,%xmm7

# qhasm: xmm1 ^= xmm11
# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
pxor %xmm10,%xmm1

# qhasm: xmm7 ^= xmm12
# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
pxor %xmm8,%xmm7

# qhasm: xmm4 ^= xmm12
# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
pxor %xmm8,%xmm4

# qhasm: xmm1 ^= xmm8
# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
pxor %xmm9,%xmm1

# qhasm: xmm2 ^= xmm8
# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
pxor %xmm9,%xmm2

# qhasm: xmm7 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
pxor %xmm0,%xmm7

# qhasm: xmm1 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
pxor %xmm6,%xmm1

# qhasm: xmm4 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
pxor %xmm7,%xmm4

# qhasm: xmm6 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
pxor %xmm0,%xmm6

# qhasm: xmm0 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
pxor %xmm1,%xmm0

# qhasm: xmm1 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
pxor %xmm5,%xmm1

# qhasm: xmm5 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
pxor %xmm2,%xmm5

# qhasm: xmm4 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
pxor %xmm5,%xmm4

# qhasm: xmm2 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
pxor %xmm3,%xmm2

# qhasm: xmm3 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
pxor %xmm5,%xmm3

# qhasm: xmm6 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
pxor %xmm3,%xmm6

# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
pshufd $0x93,%xmm0,%xmm8

# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
pshufd $0x93,%xmm1,%xmm9

# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
pshufd $0x93,%xmm4,%xmm10

# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
pshufd $0x93,%xmm6,%xmm11

# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
pshufd $0x93,%xmm3,%xmm12

# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
pshufd $0x93,%xmm7,%xmm13

# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
pshufd $0x93,%xmm2,%xmm14

# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
pshufd $0x93,%xmm5,%xmm15

# qhasm: xmm0 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
pxor %xmm8,%xmm0

# qhasm: xmm1 ^= xmm9
# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
pxor %xmm9,%xmm1

# qhasm: xmm4 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
pxor %xmm10,%xmm4

# qhasm: xmm6 ^= xmm11
# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
pxor %xmm11,%xmm6

# qhasm: xmm3 ^= xmm12
# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
pxor %xmm12,%xmm3

# qhasm: xmm7 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
pxor %xmm13,%xmm7

# qhasm: xmm2 ^= xmm14
# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
pxor %xmm14,%xmm2

# qhasm: xmm5 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
pxor %xmm15,%xmm5

# qhasm: xmm8 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
pxor %xmm5,%xmm8

# qhasm: xmm9 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
pxor %xmm0,%xmm9

# qhasm: xmm10 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
pxor %xmm1,%xmm10

# qhasm: xmm9 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
pxor %xmm5,%xmm9

# qhasm: xmm11 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
pxor %xmm4,%xmm11

# qhasm: xmm12 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
pxor %xmm6,%xmm12

# qhasm: xmm13 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
pxor %xmm3,%xmm13

# qhasm: xmm11 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
pxor %xmm5,%xmm11

# qhasm: xmm14 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
pxor %xmm7,%xmm14

# qhasm: xmm15 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
pxor %xmm2,%xmm15

# qhasm: xmm12 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
pxor %xmm5,%xmm12

# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
pshufd $0x4E,%xmm0,%xmm0

# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
pshufd $0x4E,%xmm1,%xmm1

# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
pshufd $0x4E,%xmm4,%xmm4

# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
pshufd $0x4E,%xmm6,%xmm6

# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
pshufd $0x4E,%xmm3,%xmm3

# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
pshufd $0x4E,%xmm7,%xmm7

# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
pshufd $0x4E,%xmm2,%xmm2

# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
pshufd $0x4E,%xmm5,%xmm5

# qhasm: xmm8 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
pxor %xmm0,%xmm8

# qhasm: xmm9 ^= xmm1
# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
pxor %xmm1,%xmm9

# qhasm: xmm10 ^= xmm4
# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
pxor %xmm4,%xmm10

# qhasm: xmm11 ^= xmm6
# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
pxor %xmm6,%xmm11

# qhasm: xmm12 ^= xmm3
# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
pxor %xmm3,%xmm12

# qhasm: xmm13 ^= xmm7
# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
pxor %xmm7,%xmm13

# qhasm: xmm14 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
pxor %xmm2,%xmm14

# qhasm: xmm15 ^= xmm5
# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
pxor %xmm5,%xmm15

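# The pshufd immediates above realise the MixColumns-layer word rotations
# on the bitsliced state: 0x93 maps (x0,x1,x2,x3) to (x3,x0,x1,x2), a
# rotation by one dword, and 0x4E maps it to (x2,x3,x0,x1), swapping the
# qword halves.  A C model of pshufd with an immediate:
#
#     #include <stdint.h>
#     static void pshufd(uint32_t dst[4], const uint32_t src[4], uint8_t imm) {
#         for (int i = 0; i < 4; i++)
#             dst[i] = src[(imm >> (2 * i)) & 3];   /* 2-bit lane select */
#     }
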
1784# qhasm: xmm8 ^= *(int128 *)(c + 128)
1785# asm 1: pxor 128(<c=int64#5),<xmm8=int6464#9
1786# asm 2: pxor 128(<c=%r8),<xmm8=%xmm8
1787pxor 128(%r8),%xmm8
1788
1789# qhasm: shuffle bytes of xmm8 by SR
1790# asm 1: pshufb SR,<xmm8=int6464#9
1791# asm 2: pshufb SR,<xmm8=%xmm8
1792pshufb SR,%xmm8
1793
1794# qhasm: xmm9 ^= *(int128 *)(c + 144)
1795# asm 1: pxor 144(<c=int64#5),<xmm9=int6464#10
1796# asm 2: pxor 144(<c=%r8),<xmm9=%xmm9
1797pxor 144(%r8),%xmm9
1798
1799# qhasm: shuffle bytes of xmm9 by SR
1800# asm 1: pshufb SR,<xmm9=int6464#10
1801# asm 2: pshufb SR,<xmm9=%xmm9
1802pshufb SR,%xmm9
1803
1804# qhasm: xmm10 ^= *(int128 *)(c + 160)
1805# asm 1: pxor 160(<c=int64#5),<xmm10=int6464#11
1806# asm 2: pxor 160(<c=%r8),<xmm10=%xmm10
1807pxor 160(%r8),%xmm10
1808
1809# qhasm: shuffle bytes of xmm10 by SR
1810# asm 1: pshufb SR,<xmm10=int6464#11
1811# asm 2: pshufb SR,<xmm10=%xmm10
1812pshufb SR,%xmm10
1813
1814# qhasm: xmm11 ^= *(int128 *)(c + 176)
1815# asm 1: pxor 176(<c=int64#5),<xmm11=int6464#12
1816# asm 2: pxor 176(<c=%r8),<xmm11=%xmm11
1817pxor 176(%r8),%xmm11
1818
1819# qhasm: shuffle bytes of xmm11 by SR
1820# asm 1: pshufb SR,<xmm11=int6464#12
1821# asm 2: pshufb SR,<xmm11=%xmm11
1822pshufb SR,%xmm11
1823
1824# qhasm: xmm12 ^= *(int128 *)(c + 192)
1825# asm 1: pxor 192(<c=int64#5),<xmm12=int6464#13
1826# asm 2: pxor 192(<c=%r8),<xmm12=%xmm12
1827pxor 192(%r8),%xmm12
1828
1829# qhasm: shuffle bytes of xmm12 by SR
1830# asm 1: pshufb SR,<xmm12=int6464#13
1831# asm 2: pshufb SR,<xmm12=%xmm12
1832pshufb SR,%xmm12
1833
1834# qhasm: xmm13 ^= *(int128 *)(c + 208)
1835# asm 1: pxor 208(<c=int64#5),<xmm13=int6464#14
1836# asm 2: pxor 208(<c=%r8),<xmm13=%xmm13
1837pxor 208(%r8),%xmm13
1838
1839# qhasm: shuffle bytes of xmm13 by SR
1840# asm 1: pshufb SR,<xmm13=int6464#14
1841# asm 2: pshufb SR,<xmm13=%xmm13
1842pshufb SR,%xmm13
1843
1844# qhasm: xmm14 ^= *(int128 *)(c + 224)
1845# asm 1: pxor 224(<c=int64#5),<xmm14=int6464#15
1846# asm 2: pxor 224(<c=%r8),<xmm14=%xmm14
1847pxor 224(%r8),%xmm14
1848
1849# qhasm: shuffle bytes of xmm14 by SR
1850# asm 1: pshufb SR,<xmm14=int6464#15
1851# asm 2: pshufb SR,<xmm14=%xmm14
1852pshufb SR,%xmm14
1853
1854# qhasm: xmm15 ^= *(int128 *)(c + 240)
1855# asm 1: pxor 240(<c=int64#5),<xmm15=int6464#16
1856# asm 2: pxor 240(<c=%r8),<xmm15=%xmm15
1857pxor 240(%r8),%xmm15
1858
1859# qhasm: shuffle bytes of xmm15 by SR
1860# asm 1: pshufb SR,<xmm15=int6464#16
1861# asm 2: pshufb SR,<xmm15=%xmm15
1862pshufb SR,%xmm15
1863
1864# qhasm: xmm13 ^= xmm14
1865# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
1866# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
1867pxor %xmm14,%xmm13
1868
1869# qhasm: xmm10 ^= xmm9
1870# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
1871# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
1872pxor %xmm9,%xmm10
1873
1874# qhasm: xmm13 ^= xmm8
1875# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
1876# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
1877pxor %xmm8,%xmm13
1878
1879# qhasm: xmm14 ^= xmm10
1880# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
1881# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
1882pxor %xmm10,%xmm14
1883
1884# qhasm: xmm11 ^= xmm8
1885# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
1886# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
1887pxor %xmm8,%xmm11
1888
1889# qhasm: xmm14 ^= xmm11
1890# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
1891# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
1892pxor %xmm11,%xmm14
1893
1894# qhasm: xmm11 ^= xmm15
1895# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
1896# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
1897pxor %xmm15,%xmm11
1898
1899# qhasm: xmm11 ^= xmm12
1900# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
1901# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
1902pxor %xmm12,%xmm11
1903
1904# qhasm: xmm15 ^= xmm13
1905# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
1906# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
1907pxor %xmm13,%xmm15
1908
1909# qhasm: xmm11 ^= xmm9
1910# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
1911# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
1912pxor %xmm9,%xmm11
1913
1914# qhasm: xmm12 ^= xmm13
1915# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
1916# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
1917pxor %xmm13,%xmm12
1918
1919# qhasm: xmm10 ^= xmm15
1920# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
1921# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
1922pxor %xmm15,%xmm10
1923
1924# qhasm: xmm9 ^= xmm13
1925# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
1926# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
1927pxor %xmm13,%xmm9
1928
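# note: the movdqa copies and the pand/por gates from here on are the
# nonlinear core of the S-box circuit (the GF(2^8) inversion). The
# temporaries are reallocated aggressively, which is why a qhasm name
# such as xmm3 may live in a different physical register (%xmm0) than
# its name suggests.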
1929# qhasm: xmm3 = xmm15
1930# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
1931# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
1932movdqa %xmm15,%xmm0
1933
1934# qhasm: xmm2 = xmm9
1935# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
1936# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
1937movdqa %xmm9,%xmm1
1938
1939# qhasm: xmm1 = xmm13
1940# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
1941# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
1942movdqa %xmm13,%xmm2
1943
1944# qhasm: xmm5 = xmm10
1945# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
1946# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
1947movdqa %xmm10,%xmm3
1948
1949# qhasm: xmm4 = xmm14
1950# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
1951# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
1952movdqa %xmm14,%xmm4
1953
1954# qhasm: xmm3 ^= xmm12
1955# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
1956# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
1957pxor %xmm12,%xmm0
1958
1959# qhasm: xmm2 ^= xmm10
1960# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
1961# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
1962pxor %xmm10,%xmm1
1963
1964# qhasm: xmm1 ^= xmm11
1965# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
1966# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
1967pxor %xmm11,%xmm2
1968
1969# qhasm: xmm5 ^= xmm12
1970# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
1971# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
1972pxor %xmm12,%xmm3
1973
1974# qhasm: xmm4 ^= xmm8
1975# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
1976# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
1977pxor %xmm8,%xmm4
1978
1979# qhasm: xmm6 = xmm3
1980# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
1981# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
1982movdqa %xmm0,%xmm5
1983
1984# qhasm: xmm0 = xmm2
1985# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
1986# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
1987movdqa %xmm1,%xmm6
1988
1989# qhasm: xmm7 = xmm3
1990# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
1991# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
1992movdqa %xmm0,%xmm7
1993
1994# qhasm: xmm2 |= xmm1
1995# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
1996# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
1997por %xmm2,%xmm1
1998
1999# qhasm: xmm3 |= xmm4
2000# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
2001# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
2002por %xmm4,%xmm0
2003
2004# qhasm: xmm7 ^= xmm0
2005# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
2006# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
2007pxor %xmm6,%xmm7
2008
2009# qhasm: xmm6 &= xmm4
2010# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
2011# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
2012pand %xmm4,%xmm5
2013
2014# qhasm: xmm0 &= xmm1
2015# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
2016# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
2017pand %xmm2,%xmm6
2018
2019# qhasm: xmm4 ^= xmm1
2020# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
2021# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
2022pxor %xmm2,%xmm4
2023
2024# qhasm: xmm7 &= xmm4
2025# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
2026# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
2027pand %xmm4,%xmm7
2028
2029# qhasm: xmm4 = xmm11
2030# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
2031# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
2032movdqa %xmm11,%xmm2
2033
2034# qhasm: xmm4 ^= xmm8
2035# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
2036# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
2037pxor %xmm8,%xmm2
2038
2039# qhasm: xmm5 &= xmm4
2040# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
2041# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
2042pand %xmm2,%xmm3
2043
2044# qhasm: xmm3 ^= xmm5
2045# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
2046# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
2047pxor %xmm3,%xmm0
2048
2049# qhasm: xmm2 ^= xmm5
2050# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
2051# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
2052pxor %xmm3,%xmm1
2053
2054# qhasm: xmm5 = xmm15
2055# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
2056# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
2057movdqa %xmm15,%xmm2
2058
2059# qhasm: xmm5 ^= xmm9
2060# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
2061# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
2062pxor %xmm9,%xmm2
2063
2064# qhasm: xmm4 = xmm13
2065# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
2066# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
2067movdqa %xmm13,%xmm3
2068
2069# qhasm: xmm1 = xmm5
2070# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
2071# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
2072movdqa %xmm2,%xmm4
2073
2074# qhasm: xmm4 ^= xmm14
2075# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
2076# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
2077pxor %xmm14,%xmm3
2078
2079# qhasm: xmm1 |= xmm4
2080# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
2081# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
2082por %xmm3,%xmm4
2083
2084# qhasm: xmm5 &= xmm4
2085# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
2086# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
2087pand %xmm3,%xmm2
2088
2089# qhasm: xmm0 ^= xmm5
2090# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
2091# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
2092pxor %xmm2,%xmm6
2093
2094# qhasm: xmm3 ^= xmm7
2095# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
2096# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
2097pxor %xmm7,%xmm0
2098
2099# qhasm: xmm2 ^= xmm6
2100# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
2101# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
2102pxor %xmm5,%xmm1
2103
2104# qhasm: xmm1 ^= xmm7
2105# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
2106# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
2107pxor %xmm7,%xmm4
2108
2109# qhasm: xmm0 ^= xmm6
2110# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
2111# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
2112pxor %xmm5,%xmm6
2113
2114# qhasm: xmm1 ^= xmm6
2115# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
2116# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
2117pxor %xmm5,%xmm4
2118
2119# qhasm: xmm4 = xmm10
2120# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
2121# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
2122movdqa %xmm10,%xmm2
2123
2124# qhasm: xmm5 = xmm12
2125# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
2126# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
2127movdqa %xmm12,%xmm3
2128
2129# qhasm: xmm6 = xmm9
2130# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
2131# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
2132movdqa %xmm9,%xmm5
2133
2134# qhasm: xmm7 = xmm15
2135# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
2136# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
2137movdqa %xmm15,%xmm7
2138
2139# qhasm: xmm4 &= xmm11
2140# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
2141# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
2142pand %xmm11,%xmm2
2143
2144# qhasm: xmm5 &= xmm8
2145# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
2146# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
2147pand %xmm8,%xmm3
2148
2149# qhasm: xmm6 &= xmm13
2150# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
2151# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
2152pand %xmm13,%xmm5
2153
2154# qhasm: xmm7 |= xmm14
2155# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
2156# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
2157por %xmm14,%xmm7
2158
2159# qhasm: xmm3 ^= xmm4
2160# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
2161# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
2162pxor %xmm2,%xmm0
2163
2164# qhasm: xmm2 ^= xmm5
2165# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
2166# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
2167pxor %xmm3,%xmm1
2168
2169# qhasm: xmm1 ^= xmm6
2170# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
2171# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
2172pxor %xmm5,%xmm4
2173
2174# qhasm: xmm0 ^= xmm7
2175# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
2176# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
2177pxor %xmm7,%xmm6
2178
2179# qhasm: xmm4 = xmm3
2180# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
2181# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
2182movdqa %xmm0,%xmm2
2183
2184# qhasm: xmm4 ^= xmm2
2185# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
2186# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
2187pxor %xmm1,%xmm2
2188
2189# qhasm: xmm3 &= xmm1
2190# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
2191# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
2192pand %xmm4,%xmm0
2193
2194# qhasm: xmm6 = xmm0
2195# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
2196# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
2197movdqa %xmm6,%xmm3
2198
2199# qhasm: xmm6 ^= xmm3
2200# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
2201# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
2202pxor %xmm0,%xmm3
2203
2204# qhasm: xmm7 = xmm4
2205# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
2206# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
2207movdqa %xmm2,%xmm5
2208
2209# qhasm: xmm7 &= xmm6
2210# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
2211# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
2212pand %xmm3,%xmm5
2213
2214# qhasm: xmm7 ^= xmm2
2215# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
2216# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
2217pxor %xmm1,%xmm5
2218
2219# qhasm: xmm5 = xmm1
2220# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
2221# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
2222movdqa %xmm4,%xmm7
2223
2224# qhasm: xmm5 ^= xmm0
2225# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
2226# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
2227pxor %xmm6,%xmm7
2228
2229# qhasm: xmm3 ^= xmm2
2230# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
2231# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
2232pxor %xmm1,%xmm0
2233
2234# qhasm: xmm5 &= xmm3
2235# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
2236# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
2237pand %xmm0,%xmm7
2238
2239# qhasm: xmm5 ^= xmm0
2240# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
2241# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
2242pxor %xmm6,%xmm7
2243
2244# qhasm: xmm1 ^= xmm5
2245# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
2246# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
2247pxor %xmm7,%xmm4
2248
2249# qhasm: xmm2 = xmm6
2250# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
2251# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
2252movdqa %xmm3,%xmm0
2253
2254# qhasm: xmm2 ^= xmm5
2255# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
2256# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
2257pxor %xmm7,%xmm0
2258
2259# qhasm: xmm2 &= xmm0
2260# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
2261# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
2262pand %xmm6,%xmm0
2263
2264# qhasm: xmm1 ^= xmm2
2265# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
2266# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
2267pxor %xmm0,%xmm4
2268
2269# qhasm: xmm6 ^= xmm2
2270# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
2271# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
2272pxor %xmm0,%xmm3
2273
2274# qhasm: xmm6 &= xmm7
2275# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
2276# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
2277pand %xmm5,%xmm3
2278
2279# qhasm: xmm6 ^= xmm4
2280# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
2281# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
2282pxor %xmm2,%xmm3
2283
2284# qhasm: xmm4 = xmm14
2285# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
2286# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
2287movdqa %xmm14,%xmm0
2288
2289# qhasm: xmm0 = xmm13
2290# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
2291# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
2292movdqa %xmm13,%xmm1
2293
2294# qhasm: xmm2 = xmm7
2295# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
2296# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
2297movdqa %xmm5,%xmm2
2298
2299# qhasm: xmm2 ^= xmm6
2300# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
2301# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
2302pxor %xmm3,%xmm2
2303
2304# qhasm: xmm2 &= xmm14
2305# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
2306# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
2307pand %xmm14,%xmm2
2308
2309# qhasm: xmm14 ^= xmm13
2310# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
2311# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
2312pxor %xmm13,%xmm14
2313
2314# qhasm: xmm14 &= xmm6
2315# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
2316# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
2317pand %xmm3,%xmm14
2318
2319# qhasm: xmm13 &= xmm7
2320# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
2321# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
2322pand %xmm5,%xmm13
2323
2324# qhasm: xmm14 ^= xmm13
2325# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
2326# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
2327pxor %xmm13,%xmm14
2328
2329# qhasm: xmm13 ^= xmm2
2330# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
2331# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
2332pxor %xmm2,%xmm13
2333
2334# qhasm: xmm4 ^= xmm8
2335# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
2336# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
2337pxor %xmm8,%xmm0
2338
2339# qhasm: xmm0 ^= xmm11
2340# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
2341# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
2342pxor %xmm11,%xmm1
2343
2344# qhasm: xmm7 ^= xmm5
2345# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
2346# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
2347pxor %xmm7,%xmm5
2348
2349# qhasm: xmm6 ^= xmm1
2350# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
2351# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
2352pxor %xmm4,%xmm3
2353
2354# qhasm: xmm3 = xmm7
2355# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
2356# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
2357movdqa %xmm5,%xmm2
2358
2359# qhasm: xmm3 ^= xmm6
2360# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
2361# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
2362pxor %xmm3,%xmm2
2363
2364# qhasm: xmm3 &= xmm4
2365# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
2366# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
2367pand %xmm0,%xmm2
2368
2369# qhasm: xmm4 ^= xmm0
2370# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
2371# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
2372pxor %xmm1,%xmm0
2373
2374# qhasm: xmm4 &= xmm6
2375# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
2376# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
2377pand %xmm3,%xmm0
2378
2379# qhasm: xmm0 &= xmm7
2380# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
2381# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
2382pand %xmm5,%xmm1
2383
2384# qhasm: xmm0 ^= xmm4
2385# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
2386# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
2387pxor %xmm0,%xmm1
2388
2389# qhasm: xmm4 ^= xmm3
2390# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
2391# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
2392pxor %xmm2,%xmm0
2393
2394# qhasm: xmm2 = xmm5
2395# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
2396# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
2397movdqa %xmm7,%xmm2
2398
2399# qhasm: xmm2 ^= xmm1
2400# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
2401# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
2402pxor %xmm4,%xmm2
2403
2404# qhasm: xmm2 &= xmm8
2405# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
2406# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
2407pand %xmm8,%xmm2
2408
2409# qhasm: xmm8 ^= xmm11
2410# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
2411# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
2412pxor %xmm11,%xmm8
2413
2414# qhasm: xmm8 &= xmm1
2415# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
2416# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
2417pand %xmm4,%xmm8
2418
2419# qhasm: xmm11 &= xmm5
2420# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
2421# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
2422pand %xmm7,%xmm11
2423
2424# qhasm: xmm8 ^= xmm11
2425# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
2426# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
2427pxor %xmm11,%xmm8
2428
2429# qhasm: xmm11 ^= xmm2
2430# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
2431# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
2432pxor %xmm2,%xmm11
2433
2434# qhasm: xmm14 ^= xmm4
2435# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
2436# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
2437pxor %xmm0,%xmm14
2438
2439# qhasm: xmm8 ^= xmm4
2440# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
2441# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
2442pxor %xmm0,%xmm8
2443
2444# qhasm: xmm13 ^= xmm0
2445# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
2446# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
2447pxor %xmm1,%xmm13
2448
2449# qhasm: xmm11 ^= xmm0
2450# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
2451# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
2452pxor %xmm1,%xmm11
2453
2454# qhasm: xmm4 = xmm15
2455# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
2456# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
2457movdqa %xmm15,%xmm0
2458
2459# qhasm: xmm0 = xmm9
2460# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
2461# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
2462movdqa %xmm9,%xmm1
2463
2464# qhasm: xmm4 ^= xmm12
2465# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
2466# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
2467pxor %xmm12,%xmm0
2468
2469# qhasm: xmm0 ^= xmm10
2470# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
2471# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
2472pxor %xmm10,%xmm1
2473
2474# qhasm: xmm3 = xmm7
2475# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
2476# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
2477movdqa %xmm5,%xmm2
2478
2479# qhasm: xmm3 ^= xmm6
2480# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
2481# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
2482pxor %xmm3,%xmm2
2483
2484# qhasm: xmm3 &= xmm4
2485# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
2486# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
2487pand %xmm0,%xmm2
2488
2489# qhasm: xmm4 ^= xmm0
2490# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
2491# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
2492pxor %xmm1,%xmm0
2493
2494# qhasm: xmm4 &= xmm6
2495# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
2496# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
2497pand %xmm3,%xmm0
2498
2499# qhasm: xmm0 &= xmm7
2500# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
2501# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
2502pand %xmm5,%xmm1
2503
2504# qhasm: xmm0 ^= xmm4
2505# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
2506# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
2507pxor %xmm0,%xmm1
2508
2509# qhasm: xmm4 ^= xmm3
2510# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
2511# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
2512pxor %xmm2,%xmm0
2513
2514# qhasm: xmm2 = xmm5
2515# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
2516# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
2517movdqa %xmm7,%xmm2
2518
2519# qhasm: xmm2 ^= xmm1
2520# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
2521# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
2522pxor %xmm4,%xmm2
2523
2524# qhasm: xmm2 &= xmm12
2525# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
2526# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
2527pand %xmm12,%xmm2
2528
2529# qhasm: xmm12 ^= xmm10
2530# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
2531# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
2532pxor %xmm10,%xmm12
2533
2534# qhasm: xmm12 &= xmm1
2535# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
2536# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
2537pand %xmm4,%xmm12
2538
2539# qhasm: xmm10 &= xmm5
2540# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
2541# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
2542pand %xmm7,%xmm10
2543
2544# qhasm: xmm12 ^= xmm10
2545# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
2546# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
2547pxor %xmm10,%xmm12
2548
2549# qhasm: xmm10 ^= xmm2
2550# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
2551# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
2552pxor %xmm2,%xmm10
2553
2554# qhasm: xmm7 ^= xmm5
2555# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
2556# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
2557pxor %xmm7,%xmm5
2558
2559# qhasm: xmm6 ^= xmm1
2560# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
2561# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
2562pxor %xmm4,%xmm3
2563
2564# qhasm: xmm3 = xmm7
2565# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
2566# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
2567movdqa %xmm5,%xmm2
2568
2569# qhasm: xmm3 ^= xmm6
2570# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
2571# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
2572pxor %xmm3,%xmm2
2573
2574# qhasm: xmm3 &= xmm15
2575# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
2576# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
2577pand %xmm15,%xmm2
2578
2579# qhasm: xmm15 ^= xmm9
2580# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
2581# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
2582pxor %xmm9,%xmm15
2583
2584# qhasm: xmm15 &= xmm6
2585# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
2586# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
2587pand %xmm3,%xmm15
2588
2589# qhasm: xmm9 &= xmm7
2590# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
2591# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
2592pand %xmm5,%xmm9
2593
2594# qhasm: xmm15 ^= xmm9
2595# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
2596# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
2597pxor %xmm9,%xmm15
2598
2599# qhasm: xmm9 ^= xmm3
2600# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
2601# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
2602pxor %xmm2,%xmm9
2603
2604# qhasm: xmm15 ^= xmm4
2605# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
2606# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
2607pxor %xmm0,%xmm15
2608
2609# qhasm: xmm12 ^= xmm4
2610# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
2611# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
2612pxor %xmm0,%xmm12
2613
2614# qhasm: xmm9 ^= xmm0
2615# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
2616# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
2617pxor %xmm1,%xmm9
2618
2619# qhasm: xmm10 ^= xmm0
2620# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
2621# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
2622pxor %xmm1,%xmm10
2623
2624# qhasm: xmm15 ^= xmm8
2625# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
2626# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
2627pxor %xmm8,%xmm15
2628
2629# qhasm: xmm9 ^= xmm14
2630# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
2631# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
2632pxor %xmm14,%xmm9
2633
2634# qhasm: xmm12 ^= xmm15
2635# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
2636# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
2637pxor %xmm15,%xmm12
2638
2639# qhasm: xmm14 ^= xmm8
2640# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
2641# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
2642pxor %xmm8,%xmm14
2643
2644# qhasm: xmm8 ^= xmm9
2645# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
2646# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
2647pxor %xmm9,%xmm8
2648
2649# qhasm: xmm9 ^= xmm13
2650# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
2651# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
2652pxor %xmm13,%xmm9
2653
2654# qhasm: xmm13 ^= xmm10
2655# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
2656# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
2657pxor %xmm10,%xmm13
2658
2659# qhasm: xmm12 ^= xmm13
2660# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
2661# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
2662pxor %xmm13,%xmm12
2663
2664# qhasm: xmm10 ^= xmm11
2665# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
2666# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
2667pxor %xmm11,%xmm10
2668
2669# qhasm: xmm11 ^= xmm13
2670# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
2671# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
2672pxor %xmm13,%xmm11
2673
2674# qhasm: xmm14 ^= xmm11
2675# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
2676# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
2677pxor %xmm11,%xmm14
2678
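# note: the S-box output is back in xmm8..xmm15 and MixColumns starts
# here. pshufd $0x93 rotates the four dwords of a register by one
# position (dst dwords = src3,src0,src1,src2), i.e. a 32-bit rotation of
# each bit-plane; the pxor chains below combine every plane with rotated
# copies, which is how MixColumns is expressed on a bitsliced state.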
2679# qhasm: xmm0 = shuffle dwords of xmm8 by 0x93
2680# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
2681# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
2682pshufd $0x93,%xmm8,%xmm0
2683
2684# qhasm: xmm1 = shuffle dwords of xmm9 by 0x93
2685# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
2686# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
2687pshufd $0x93,%xmm9,%xmm1
2688
2689# qhasm: xmm2 = shuffle dwords of xmm12 by 0x93
2690# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
2691# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
2692pshufd $0x93,%xmm12,%xmm2
2693
2694# qhasm: xmm3 = shuffle dwords of xmm14 by 0x93
2695# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
2696# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
2697pshufd $0x93,%xmm14,%xmm3
2698
2699# qhasm: xmm4 = shuffle dwords of xmm11 by 0x93
2700# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
2701# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
2702pshufd $0x93,%xmm11,%xmm4
2703
2704# qhasm: xmm5 = shuffle dwords of xmm15 by 0x93
2705# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
2706# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
2707pshufd $0x93,%xmm15,%xmm5
2708
2709# qhasm: xmm6 = shuffle dwords of xmm10 by 0x93
2710# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
2711# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
2712pshufd $0x93,%xmm10,%xmm6
2713
2714# qhasm: xmm7 = shuffle dwords of xmm13 by 0x93
2715# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
2716# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
2717pshufd $0x93,%xmm13,%xmm7
2718
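# note: each plane is first folded with its own rotation (x ^= rot32(x));
# the rotated copies left in xmm0..xmm7 are then cross-XORed between
# planes in the block that follows.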
2719# qhasm: xmm8 ^= xmm0
2720# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
2721# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
2722pxor %xmm0,%xmm8
2723
2724# qhasm: xmm9 ^= xmm1
2725# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
2726# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
2727pxor %xmm1,%xmm9
2728
2729# qhasm: xmm12 ^= xmm2
2730# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13
2731# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12
2732pxor %xmm2,%xmm12
2733
2734# qhasm: xmm14 ^= xmm3
2735# asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15
2736# asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14
2737pxor %xmm3,%xmm14
2738
2739# qhasm: xmm11 ^= xmm4
2740# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
2741# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
2742pxor %xmm4,%xmm11
2743
2744# qhasm: xmm15 ^= xmm5
2745# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
2746# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
2747pxor %xmm5,%xmm15
2748
2749# qhasm: xmm10 ^= xmm6
2750# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11
2751# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10
2752pxor %xmm6,%xmm10
2753
2754# qhasm: xmm13 ^= xmm7
2755# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
2756# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
2757pxor %xmm7,%xmm13
2758
2759# qhasm: xmm0 ^= xmm13
2760# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
2761# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
2762pxor %xmm13,%xmm0
2763
2764# qhasm: xmm1 ^= xmm8
2765# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
2766# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
2767pxor %xmm8,%xmm1
2768
2769# qhasm: xmm2 ^= xmm9
2770# asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3
2771# asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2
2772pxor %xmm9,%xmm2
2773
2774# qhasm: xmm1 ^= xmm13
2775# asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2
2776# asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1
2777pxor %xmm13,%xmm1
2778
2779# qhasm: xmm3 ^= xmm12
2780# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
2781# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
2782pxor %xmm12,%xmm3
2783
2784# qhasm: xmm4 ^= xmm14
2785# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
2786# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
2787pxor %xmm14,%xmm4
2788
2789# qhasm: xmm5 ^= xmm11
2790# asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6
2791# asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5
2792pxor %xmm11,%xmm5
2793
2794# qhasm: xmm3 ^= xmm13
2795# asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4
2796# asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3
2797pxor %xmm13,%xmm3
2798
2799# qhasm: xmm6 ^= xmm15
2800# asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7
2801# asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6
2802pxor %xmm15,%xmm6
2803
2804# qhasm: xmm7 ^= xmm10
2805# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
2806# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
2807pxor %xmm10,%xmm7
2808
2809# qhasm: xmm4 ^= xmm13
2810# asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5
2811# asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4
2812pxor %xmm13,%xmm4
2813
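# note: pshufd $0x4E swaps the two 64-bit halves of a register (a
# two-dword rotation); together with the $0x93 rotation above this
# supplies the rotations by one and two 32-bit words that the bitsliced
# MixColumns equations need.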
2814# qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E
2815# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
2816# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
2817pshufd $0x4E,%xmm8,%xmm8
2818
2819# qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E
2820# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
2821# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
2822pshufd $0x4E,%xmm9,%xmm9
2823
2824# qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E
2825# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
2826# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
2827pshufd $0x4E,%xmm12,%xmm12
2828
2829# qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E
2830# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
2831# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
2832pshufd $0x4E,%xmm14,%xmm14
2833
2834# qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E
2835# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
2836# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
2837pshufd $0x4E,%xmm11,%xmm11
2838
2839# qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E
2840# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
2841# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
2842pshufd $0x4E,%xmm15,%xmm15
2843
2844# qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E
2845# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
2846# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
2847pshufd $0x4E,%xmm10,%xmm10
2848
2849# qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E
2850# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
2851# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
2852pshufd $0x4E,%xmm13,%xmm13
2853
2854# qhasm: xmm0 ^= xmm8
2855# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
2856# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
2857pxor %xmm8,%xmm0
2858
2859# qhasm: xmm1 ^= xmm9
2860# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
2861# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
2862pxor %xmm9,%xmm1
2863
2864# qhasm: xmm2 ^= xmm12
2865# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
2866# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
2867pxor %xmm12,%xmm2
2868
2869# qhasm: xmm3 ^= xmm14
2870# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
2871# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
2872pxor %xmm14,%xmm3
2873
2874# qhasm: xmm4 ^= xmm11
2875# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
2876# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
2877pxor %xmm11,%xmm4
2878
2879# qhasm: xmm5 ^= xmm15
2880# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
2881# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
2882pxor %xmm15,%xmm5
2883
2884# qhasm: xmm6 ^= xmm10
2885# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
2886# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
2887pxor %xmm10,%xmm6
2888
2889# qhasm: xmm7 ^= xmm13
2890# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
2891# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
2892pxor %xmm13,%xmm7
2893
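# note: the MixColumns result now sits in xmm0..xmm7, so this round's
# AddRoundKey/ShiftRows works on those registers instead, with the next
# 128-byte bitsliced round key taken from c+256..c+368; the two register
# sets appear to alternate from round to round.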
2894# qhasm: xmm0 ^= *(int128 *)(c + 256)
2895# asm 1: pxor 256(<c=int64#5),<xmm0=int6464#1
2896# asm 2: pxor 256(<c=%r8),<xmm0=%xmm0
2897pxor 256(%r8),%xmm0
2898
2899# qhasm: shuffle bytes of xmm0 by SR
2900# asm 1: pshufb SR,<xmm0=int6464#1
2901# asm 2: pshufb SR,<xmm0=%xmm0
2902pshufb SR,%xmm0
2903
2904# qhasm: xmm1 ^= *(int128 *)(c + 272)
2905# asm 1: pxor 272(<c=int64#5),<xmm1=int6464#2
2906# asm 2: pxor 272(<c=%r8),<xmm1=%xmm1
2907pxor 272(%r8),%xmm1
2908
2909# qhasm: shuffle bytes of xmm1 by SR
2910# asm 1: pshufb SR,<xmm1=int6464#2
2911# asm 2: pshufb SR,<xmm1=%xmm1
2912pshufb SR,%xmm1
2913
2914# qhasm: xmm2 ^= *(int128 *)(c + 288)
2915# asm 1: pxor 288(<c=int64#5),<xmm2=int6464#3
2916# asm 2: pxor 288(<c=%r8),<xmm2=%xmm2
2917pxor 288(%r8),%xmm2
2918
2919# qhasm: shuffle bytes of xmm2 by SR
2920# asm 1: pshufb SR,<xmm2=int6464#3
2921# asm 2: pshufb SR,<xmm2=%xmm2
2922pshufb SR,%xmm2
2923
2924# qhasm: xmm3 ^= *(int128 *)(c + 304)
2925# asm 1: pxor 304(<c=int64#5),<xmm3=int6464#4
2926# asm 2: pxor 304(<c=%r8),<xmm3=%xmm3
2927pxor 304(%r8),%xmm3
2928
2929# qhasm: shuffle bytes of xmm3 by SR
2930# asm 1: pshufb SR,<xmm3=int6464#4
2931# asm 2: pshufb SR,<xmm3=%xmm3
2932pshufb SR,%xmm3
2933
2934# qhasm: xmm4 ^= *(int128 *)(c + 320)
2935# asm 1: pxor 320(<c=int64#5),<xmm4=int6464#5
2936# asm 2: pxor 320(<c=%r8),<xmm4=%xmm4
2937pxor 320(%r8),%xmm4
2938
2939# qhasm: shuffle bytes of xmm4 by SR
2940# asm 1: pshufb SR,<xmm4=int6464#5
2941# asm 2: pshufb SR,<xmm4=%xmm4
2942pshufb SR,%xmm4
2943
2944# qhasm: xmm5 ^= *(int128 *)(c + 336)
2945# asm 1: pxor 336(<c=int64#5),<xmm5=int6464#6
2946# asm 2: pxor 336(<c=%r8),<xmm5=%xmm5
2947pxor 336(%r8),%xmm5
2948
2949# qhasm: shuffle bytes of xmm5 by SR
2950# asm 1: pshufb SR,<xmm5=int6464#6
2951# asm 2: pshufb SR,<xmm5=%xmm5
2952pshufb SR,%xmm5
2953
2954# qhasm: xmm6 ^= *(int128 *)(c + 352)
2955# asm 1: pxor 352(<c=int64#5),<xmm6=int6464#7
2956# asm 2: pxor 352(<c=%r8),<xmm6=%xmm6
2957pxor 352(%r8),%xmm6
2958
2959# qhasm: shuffle bytes of xmm6 by SR
2960# asm 1: pshufb SR,<xmm6=int6464#7
2961# asm 2: pshufb SR,<xmm6=%xmm6
2962pshufb SR,%xmm6
2963
2964# qhasm: xmm7 ^= *(int128 *)(c + 368)
2965# asm 1: pxor 368(<c=int64#5),<xmm7=int6464#8
2966# asm 2: pxor 368(<c=%r8),<xmm7=%xmm7
2967pxor 368(%r8),%xmm7
2968
2969# qhasm: shuffle bytes of xmm7 by SR
2970# asm 1: pshufb SR,<xmm7=int6464#8
2971# asm 2: pshufb SR,<xmm7=%xmm7
2972pshufb SR,%xmm7
2973
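# note: same S-box circuit as above, now applied to xmm0..xmm7: the XOR
# input layer below, then the AND/OR core, then the XOR output layer,
# with the substituted planes ending up back in xmm0..xmm7.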
2974# qhasm: xmm5 ^= xmm6
2975# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
2976# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
2977pxor %xmm6,%xmm5
2978
2979# qhasm: xmm2 ^= xmm1
2980# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
2981# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
2982pxor %xmm1,%xmm2
2983
2984# qhasm: xmm5 ^= xmm0
2985# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
2986# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
2987pxor %xmm0,%xmm5
2988
2989# qhasm: xmm6 ^= xmm2
2990# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
2991# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
2992pxor %xmm2,%xmm6
2993
2994# qhasm: xmm3 ^= xmm0
2995# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
2996# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
2997pxor %xmm0,%xmm3
2998
2999# qhasm: xmm6 ^= xmm3
3000# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
3001# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
3002pxor %xmm3,%xmm6
3003
3004# qhasm: xmm3 ^= xmm7
3005# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
3006# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
3007pxor %xmm7,%xmm3
3008
3009# qhasm: xmm3 ^= xmm4
3010# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
3011# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
3012pxor %xmm4,%xmm3
3013
3014# qhasm: xmm7 ^= xmm5
3015# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
3016# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
3017pxor %xmm5,%xmm7
3018
3019# qhasm: xmm3 ^= xmm1
3020# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
3021# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
3022pxor %xmm1,%xmm3
3023
3024# qhasm: xmm4 ^= xmm5
3025# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
3026# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
3027pxor %xmm5,%xmm4
3028
3029# qhasm: xmm2 ^= xmm7
3030# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
3031# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
3032pxor %xmm7,%xmm2
3033
3034# qhasm: xmm1 ^= xmm5
3035# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
3036# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
3037pxor %xmm5,%xmm1
3038
3039# qhasm: xmm11 = xmm7
3040# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
3041# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
3042movdqa %xmm7,%xmm8
3043
3044# qhasm: xmm10 = xmm1
3045# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
3046# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
3047movdqa %xmm1,%xmm9
3048
3049# qhasm: xmm9 = xmm5
3050# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
3051# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
3052movdqa %xmm5,%xmm10
3053
3054# qhasm: xmm13 = xmm2
3055# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
3056# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
3057movdqa %xmm2,%xmm11
3058
3059# qhasm: xmm12 = xmm6
3060# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
3061# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
3062movdqa %xmm6,%xmm12
3063
3064# qhasm: xmm11 ^= xmm4
3065# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
3066# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
3067pxor %xmm4,%xmm8
3068
3069# qhasm: xmm10 ^= xmm2
3070# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
3071# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
3072pxor %xmm2,%xmm9
3073
3074# qhasm: xmm9 ^= xmm3
3075# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
3076# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
3077pxor %xmm3,%xmm10
3078
3079# qhasm: xmm13 ^= xmm4
3080# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
3081# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
3082pxor %xmm4,%xmm11
3083
3084# qhasm: xmm12 ^= xmm0
3085# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
3086# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
3087pxor %xmm0,%xmm12
3088
3089# qhasm: xmm14 = xmm11
3090# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
3091# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
3092movdqa %xmm8,%xmm13
3093
3094# qhasm: xmm8 = xmm10
3095# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
3096# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
3097movdqa %xmm9,%xmm14
3098
3099# qhasm: xmm15 = xmm11
3100# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
3101# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
3102movdqa %xmm8,%xmm15
3103
3104# qhasm: xmm10 |= xmm9
3105# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
3106# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
3107por %xmm10,%xmm9
3108
3109# qhasm: xmm11 |= xmm12
3110# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
3111# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
3112por %xmm12,%xmm8
3113
3114# qhasm: xmm15 ^= xmm8
3115# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
3116# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
3117pxor %xmm14,%xmm15
3118
3119# qhasm: xmm14 &= xmm12
3120# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
3121# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
3122pand %xmm12,%xmm13
3123
3124# qhasm: xmm8 &= xmm9
3125# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
3126# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
3127pand %xmm10,%xmm14
3128
3129# qhasm: xmm12 ^= xmm9
3130# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
3131# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
3132pxor %xmm10,%xmm12
3133
3134# qhasm: xmm15 &= xmm12
3135# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
3136# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
3137pand %xmm12,%xmm15
3138
3139# qhasm: xmm12 = xmm3
3140# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
3141# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
3142movdqa %xmm3,%xmm10
3143
3144# qhasm: xmm12 ^= xmm0
3145# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
3146# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
3147pxor %xmm0,%xmm10
3148
3149# qhasm: xmm13 &= xmm12
3150# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
3151# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
3152pand %xmm10,%xmm11
3153
3154# qhasm: xmm11 ^= xmm13
3155# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
3156# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
3157pxor %xmm11,%xmm8
3158
3159# qhasm: xmm10 ^= xmm13
3160# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
3161# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
3162pxor %xmm11,%xmm9
3163
3164# qhasm: xmm13 = xmm7
3165# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
3166# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
3167movdqa %xmm7,%xmm10
3168
3169# qhasm: xmm13 ^= xmm1
3170# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
3171# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
3172pxor %xmm1,%xmm10
3173
3174# qhasm: xmm12 = xmm5
3175# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
3176# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
3177movdqa %xmm5,%xmm11
3178
3179# qhasm: xmm9 = xmm13
3180# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
3181# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
3182movdqa %xmm10,%xmm12
3183
3184# qhasm: xmm12 ^= xmm6
3185# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
3186# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
3187pxor %xmm6,%xmm11
3188
3189# qhasm: xmm9 |= xmm12
3190# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
3191# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
3192por %xmm11,%xmm12
3193
3194# qhasm: xmm13 &= xmm12
3195# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
3196# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
3197pand %xmm11,%xmm10
3198
3199# qhasm: xmm8 ^= xmm13
3200# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
3201# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
3202pxor %xmm10,%xmm14
3203
3204# qhasm: xmm11 ^= xmm15
3205# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
3206# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
3207pxor %xmm15,%xmm8
3208
3209# qhasm: xmm10 ^= xmm14
3210# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
3211# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
3212pxor %xmm13,%xmm9
3213
3214# qhasm: xmm9 ^= xmm15
3215# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
3216# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
3217pxor %xmm15,%xmm12
3218
3219# qhasm: xmm8 ^= xmm14
3220# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
3221# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
3222pxor %xmm13,%xmm14
3223
3224# qhasm: xmm9 ^= xmm14
3225# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
3226# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
3227pxor %xmm13,%xmm12
3228
3229# qhasm: xmm12 = xmm2
3230# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
3231# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
3232movdqa %xmm2,%xmm10
3233
3234# qhasm: xmm13 = xmm4
3235# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
3236# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
3237movdqa %xmm4,%xmm11
3238
3239# qhasm: xmm14 = xmm1
3240# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
3241# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
3242movdqa %xmm1,%xmm13
3243
3244# qhasm: xmm15 = xmm7
3245# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
3246# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
3247movdqa %xmm7,%xmm15
3248
3249# qhasm: xmm12 &= xmm3
3250# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
3251# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
3252pand %xmm3,%xmm10
3253
3254# qhasm: xmm13 &= xmm0
3255# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
3256# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
3257pand %xmm0,%xmm11
3258
3259# qhasm: xmm14 &= xmm5
3260# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
3261# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
3262pand %xmm5,%xmm13
3263
3264# qhasm: xmm15 |= xmm6
3265# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
3266# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
3267por %xmm6,%xmm15
3268
3269# qhasm: xmm11 ^= xmm12
3270# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
3271# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
3272pxor %xmm10,%xmm8
3273
3274# qhasm: xmm10 ^= xmm13
3275# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
3276# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
3277pxor %xmm11,%xmm9
3278
3279# qhasm: xmm9 ^= xmm14
3280# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
3281# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
3282pxor %xmm13,%xmm12
3283
3284# qhasm: xmm8 ^= xmm15
3285# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
3286# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
3287pxor %xmm15,%xmm14
3288
3289# qhasm: xmm12 = xmm11
3290# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
3291# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
3292movdqa %xmm8,%xmm10
3293
3294# qhasm: xmm12 ^= xmm10
3295# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
3296# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
3297pxor %xmm9,%xmm10
3298
3299# qhasm: xmm11 &= xmm9
3300# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
3301# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
3302pand %xmm12,%xmm8
3303
3304# qhasm: xmm14 = xmm8
3305# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
3306# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
3307movdqa %xmm14,%xmm11
3308
3309# qhasm: xmm14 ^= xmm11
3310# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
3311# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
3312pxor %xmm8,%xmm11
3313
3314# qhasm: xmm15 = xmm12
3315# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
3316# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
3317movdqa %xmm10,%xmm13
3318
3319# qhasm: xmm15 &= xmm14
3320# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
3321# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
3322pand %xmm11,%xmm13
3323
3324# qhasm: xmm15 ^= xmm10
3325# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
3326# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
3327pxor %xmm9,%xmm13
3328
3329# qhasm: xmm13 = xmm9
3330# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
3331# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
3332movdqa %xmm12,%xmm15
3333
3334# qhasm: xmm13 ^= xmm8
3335# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
3336# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
3337pxor %xmm14,%xmm15
3338
3339# qhasm: xmm11 ^= xmm10
3340# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
3341# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
3342pxor %xmm9,%xmm8
3343
3344# qhasm: xmm13 &= xmm11
3345# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
3346# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
3347pand %xmm8,%xmm15
3348
3349# qhasm: xmm13 ^= xmm8
3350# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
3351# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
3352pxor %xmm14,%xmm15
3353
3354# qhasm: xmm9 ^= xmm13
3355# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
3356# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
3357pxor %xmm15,%xmm12
3358
3359# qhasm: xmm10 = xmm14
3360# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
3361# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
3362movdqa %xmm11,%xmm8
3363
3364# qhasm: xmm10 ^= xmm13
3365# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
3366# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
3367pxor %xmm15,%xmm8
3368
3369# qhasm: xmm10 &= xmm8
3370# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
3371# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
3372pand %xmm14,%xmm8
3373
3374# qhasm: xmm9 ^= xmm10
3375# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
3376# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
3377pxor %xmm8,%xmm12
3378
3379# qhasm: xmm14 ^= xmm10
3380# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
3381# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
3382pxor %xmm8,%xmm11
3383
3384# qhasm: xmm14 &= xmm15
3385# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
3386# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
3387pand %xmm13,%xmm11
3388
3389# qhasm: xmm14 ^= xmm12
3390# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
3391# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
3392pxor %xmm10,%xmm11
3393
3394# qhasm: xmm12 = xmm6
3395# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
3396# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
3397movdqa %xmm6,%xmm8
3398
3399# qhasm: xmm8 = xmm5
3400# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
3401# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
3402movdqa %xmm5,%xmm9
3403
3404# qhasm: xmm10 = xmm15
3405# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
3406# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
3407movdqa %xmm13,%xmm10
3408
3409# qhasm: xmm10 ^= xmm14
3410# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
3411# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
3412pxor %xmm11,%xmm10
3413
3414# qhasm: xmm10 &= xmm6
3415# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
3416# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
3417pand %xmm6,%xmm10
3418
3419# qhasm: xmm6 ^= xmm5
3420# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
3421# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
3422pxor %xmm5,%xmm6
3423
3424# qhasm: xmm6 &= xmm14
3425# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
3426# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
3427pand %xmm11,%xmm6
3428
3429# qhasm: xmm5 &= xmm15
3430# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
3431# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
3432pand %xmm13,%xmm5
3433
3434# qhasm: xmm6 ^= xmm5
3435# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
3436# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
3437pxor %xmm5,%xmm6
3438
3439# qhasm: xmm5 ^= xmm10
3440# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
3441# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
3442pxor %xmm10,%xmm5
3443
3444# qhasm: xmm12 ^= xmm0
3445# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
3446# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
3447pxor %xmm0,%xmm8
3448
3449# qhasm: xmm8 ^= xmm3
3450# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
3451# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
3452pxor %xmm3,%xmm9
3453
3454# qhasm: xmm15 ^= xmm13
3455# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
3456# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
3457pxor %xmm15,%xmm13
3458
3459# qhasm: xmm14 ^= xmm9
3460# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
3461# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
3462pxor %xmm12,%xmm11
3463
3464# qhasm: xmm11 = xmm15
3465# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3466# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3467movdqa %xmm13,%xmm10
3468
3469# qhasm: xmm11 ^= xmm14
3470# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3471# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3472pxor %xmm11,%xmm10
3473
3474# qhasm: xmm11 &= xmm12
3475# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
3476# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
3477pand %xmm8,%xmm10
3478
3479# qhasm: xmm12 ^= xmm8
3480# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
3481# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
3482pxor %xmm9,%xmm8
3483
3484# qhasm: xmm12 &= xmm14
3485# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
3486# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
3487pand %xmm11,%xmm8
3488
3489# qhasm: xmm8 &= xmm15
3490# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
3491# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
3492pand %xmm13,%xmm9
3493
3494# qhasm: xmm8 ^= xmm12
3495# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
3496# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
3497pxor %xmm8,%xmm9
3498
3499# qhasm: xmm12 ^= xmm11
3500# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
3501# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
3502pxor %xmm10,%xmm8
3503
3504# qhasm: xmm10 = xmm13
3505# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
3506# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
3507movdqa %xmm15,%xmm10
3508
3509# qhasm: xmm10 ^= xmm9
3510# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
3511# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
3512pxor %xmm12,%xmm10
3513
3514# qhasm: xmm10 &= xmm0
3515# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
3516# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
3517pand %xmm0,%xmm10
3518
3519# qhasm: xmm0 ^= xmm3
3520# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
3521# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
3522pxor %xmm3,%xmm0
3523
3524# qhasm: xmm0 &= xmm9
3525# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
3526# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
3527pand %xmm12,%xmm0
3528
3529# qhasm: xmm3 &= xmm13
3530# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
3531# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
3532pand %xmm15,%xmm3
3533
3534# qhasm: xmm0 ^= xmm3
3535# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
3536# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
3537pxor %xmm3,%xmm0
3538
3539# qhasm: xmm3 ^= xmm10
3540# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
3541# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
3542pxor %xmm10,%xmm3
3543
3544# qhasm: xmm6 ^= xmm12
3545# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
3546# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
3547pxor %xmm8,%xmm6
3548
3549# qhasm: xmm0 ^= xmm12
3550# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
3551# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
3552pxor %xmm8,%xmm0
3553
3554# qhasm: xmm5 ^= xmm8
3555# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
3556# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
3557pxor %xmm9,%xmm5
3558
3559# qhasm: xmm3 ^= xmm8
3560# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
3561# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
3562pxor %xmm9,%xmm3
3563
3564# qhasm: xmm12 = xmm7
3565# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
3566# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
3567movdqa %xmm7,%xmm8
3568
3569# qhasm: xmm8 = xmm1
3570# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
3571# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
3572movdqa %xmm1,%xmm9
3573
3574# qhasm: xmm12 ^= xmm4
3575# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
3576# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
3577pxor %xmm4,%xmm8
3578
3579# qhasm: xmm8 ^= xmm2
3580# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
3581# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
3582pxor %xmm2,%xmm9
3583
3584# qhasm: xmm11 = xmm15
3585# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3586# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3587movdqa %xmm13,%xmm10
3588
3589# qhasm: xmm11 ^= xmm14
3590# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3591# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3592pxor %xmm11,%xmm10
3593
3594# qhasm: xmm11 &= xmm12
3595# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
3596# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
3597pand %xmm8,%xmm10
3598
3599# qhasm: xmm12 ^= xmm8
3600# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
3601# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
3602pxor %xmm9,%xmm8
3603
3604# qhasm: xmm12 &= xmm14
3605# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
3606# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
3607pand %xmm11,%xmm8
3608
3609# qhasm: xmm8 &= xmm15
3610# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
3611# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
3612pand %xmm13,%xmm9
3613
3614# qhasm: xmm8 ^= xmm12
3615# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
3616# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
3617pxor %xmm8,%xmm9
3618
3619# qhasm: xmm12 ^= xmm11
3620# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
3621# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
3622pxor %xmm10,%xmm8
3623
3624# qhasm: xmm10 = xmm13
3625# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
3626# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
3627movdqa %xmm15,%xmm10
3628
3629# qhasm: xmm10 ^= xmm9
3630# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
3631# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
3632pxor %xmm12,%xmm10
3633
3634# qhasm: xmm10 &= xmm4
3635# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
3636# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
3637pand %xmm4,%xmm10
3638
3639# qhasm: xmm4 ^= xmm2
3640# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
3641# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
3642pxor %xmm2,%xmm4
3643
3644# qhasm: xmm4 &= xmm9
3645# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
3646# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
3647pand %xmm12,%xmm4
3648
3649# qhasm: xmm2 &= xmm13
3650# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
3651# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
3652pand %xmm15,%xmm2
3653
3654# qhasm: xmm4 ^= xmm2
3655# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
3656# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
3657pxor %xmm2,%xmm4
3658
3659# qhasm: xmm2 ^= xmm10
3660# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
3661# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
3662pxor %xmm10,%xmm2
3663
3664# qhasm: xmm15 ^= xmm13
3665# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
3666# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
3667pxor %xmm15,%xmm13
3668
3669# qhasm: xmm14 ^= xmm9
3670# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
3671# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
3672pxor %xmm12,%xmm11
3673
3674# qhasm: xmm11 = xmm15
3675# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
3676# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
3677movdqa %xmm13,%xmm10
3678
3679# qhasm: xmm11 ^= xmm14
3680# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
3681# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
3682pxor %xmm11,%xmm10
3683
3684# qhasm: xmm11 &= xmm7
3685# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
3686# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
3687pand %xmm7,%xmm10
3688
3689# qhasm: xmm7 ^= xmm1
3690# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
3691# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
3692pxor %xmm1,%xmm7
3693
3694# qhasm: xmm7 &= xmm14
3695# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
3696# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
3697pand %xmm11,%xmm7
3698
3699# qhasm: xmm1 &= xmm15
3700# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
3701# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
3702pand %xmm13,%xmm1
3703
3704# qhasm: xmm7 ^= xmm1
3705# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
3706# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
3707pxor %xmm1,%xmm7
3708
3709# qhasm: xmm1 ^= xmm11
3710# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
3711# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
3712pxor %xmm10,%xmm1
3713
3714# qhasm: xmm7 ^= xmm12
3715# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
3716# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
3717pxor %xmm8,%xmm7
3718
3719# qhasm: xmm4 ^= xmm12
3720# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
3721# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
3722pxor %xmm8,%xmm4
3723
3724# qhasm: xmm1 ^= xmm8
3725# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
3726# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
3727pxor %xmm9,%xmm1
3728
3729# qhasm: xmm2 ^= xmm8
3730# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
3731# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
3732pxor %xmm9,%xmm2
3733
3734# qhasm: xmm7 ^= xmm0
3735# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
3736# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
3737pxor %xmm0,%xmm7
3738
3739# qhasm: xmm1 ^= xmm6
3740# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
3741# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
3742pxor %xmm6,%xmm1
3743
3744# qhasm: xmm4 ^= xmm7
3745# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
3746# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
3747pxor %xmm7,%xmm4
3748
3749# qhasm: xmm6 ^= xmm0
3750# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
3751# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
3752pxor %xmm0,%xmm6
3753
3754# qhasm: xmm0 ^= xmm1
3755# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
3756# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
3757pxor %xmm1,%xmm0
3758
3759# qhasm: xmm1 ^= xmm5
3760# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
3761# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
3762pxor %xmm5,%xmm1
3763
3764# qhasm: xmm5 ^= xmm2
3765# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
3766# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
3767pxor %xmm2,%xmm5
3768
3769# qhasm: xmm4 ^= xmm5
3770# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
3771# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
3772pxor %xmm5,%xmm4
3773
3774# qhasm: xmm2 ^= xmm3
3775# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
3776# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
3777pxor %xmm3,%xmm2
3778
3779# qhasm: xmm3 ^= xmm5
3780# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
3781# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
3782pxor %xmm5,%xmm3
3783
3784# qhasm: xmm6 ^= xmm3
3785# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
3786# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
3787pxor %xmm3,%xmm6
3788
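# note: second MixColumns, same rotate-and-XOR pattern as before but
# driven from xmm0..xmm7, with xmm8..xmm15 holding the $0x93-rotated
# copies.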
3789# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
3790# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
3791# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
3792pshufd $0x93,%xmm0,%xmm8
3793
3794# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
3795# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
3796# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
3797pshufd $0x93,%xmm1,%xmm9
3798
3799# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
3800# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
3801# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
3802pshufd $0x93,%xmm4,%xmm10
3803
3804# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
3805# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
3806# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
3807pshufd $0x93,%xmm6,%xmm11
3808
3809# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
3810# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
3811# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
3812pshufd $0x93,%xmm3,%xmm12
3813
3814# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
3815# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
3816# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
3817pshufd $0x93,%xmm7,%xmm13
3818
3819# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
3820# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
3821# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
3822pshufd $0x93,%xmm2,%xmm14
3823
3824# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
3825# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
3826# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
3827pshufd $0x93,%xmm5,%xmm15
3828
3829# qhasm: xmm0 ^= xmm8
3830# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
3831# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
3832pxor %xmm8,%xmm0
3833
3834# qhasm: xmm1 ^= xmm9
3835# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
3836# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
3837pxor %xmm9,%xmm1
3838
3839# qhasm: xmm4 ^= xmm10
3840# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
3841# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
3842pxor %xmm10,%xmm4
3843
3844# qhasm: xmm6 ^= xmm11
3845# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
3846# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
3847pxor %xmm11,%xmm6
3848
3849# qhasm: xmm3 ^= xmm12
3850# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
3851# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
3852pxor %xmm12,%xmm3
3853
3854# qhasm: xmm7 ^= xmm13
3855# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
3856# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
3857pxor %xmm13,%xmm7
3858
3859# qhasm: xmm2 ^= xmm14
3860# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
3861# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
3862pxor %xmm14,%xmm2
3863
3864# qhasm: xmm5 ^= xmm15
3865# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
3866# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
3867pxor %xmm15,%xmm5
3868
3869# qhasm: xmm8 ^= xmm5
3870# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
3871# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
3872pxor %xmm5,%xmm8
3873
3874# qhasm: xmm9 ^= xmm0
3875# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
3876# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
3877pxor %xmm0,%xmm9
3878
3879# qhasm: xmm10 ^= xmm1
3880# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
3881# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
3882pxor %xmm1,%xmm10
3883
3884# qhasm: xmm9 ^= xmm5
3885# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
3886# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
3887pxor %xmm5,%xmm9
3888
3889# qhasm: xmm11 ^= xmm4
3890# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
3891# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
3892pxor %xmm4,%xmm11
3893
3894# qhasm: xmm12 ^= xmm6
3895# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
3896# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
3897pxor %xmm6,%xmm12
3898
3899# qhasm: xmm13 ^= xmm3
3900# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
3901# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
3902pxor %xmm3,%xmm13
3903
3904# qhasm: xmm11 ^= xmm5
3905# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
3906# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
3907pxor %xmm5,%xmm11
3908
3909# qhasm: xmm14 ^= xmm7
3910# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
3911# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
3912pxor %xmm7,%xmm14
3913
3914# qhasm: xmm15 ^= xmm2
3915# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
3916# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
3917pxor %xmm2,%xmm15
3918
3919# qhasm: xmm12 ^= xmm5
3920# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
3921# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
3922pxor %xmm5,%xmm12
3923
3924# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
3925# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
3926# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
3927pshufd $0x4E,%xmm0,%xmm0
3928
3929# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
3930# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
3931# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
3932pshufd $0x4E,%xmm1,%xmm1
3933
3934# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
3935# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
3936# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
3937pshufd $0x4E,%xmm4,%xmm4
3938
3939# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
3940# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
3941# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
3942pshufd $0x4E,%xmm6,%xmm6
3943
3944# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
3945# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
3946# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
3947pshufd $0x4E,%xmm3,%xmm3
3948
3949# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
3950# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
3951# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
3952pshufd $0x4E,%xmm7,%xmm7
3953
3954# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
3955# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
3956# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
3957pshufd $0x4E,%xmm2,%xmm2
3958
3959# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
3960# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
3961# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
3962pshufd $0x4E,%xmm5,%xmm5
3963
3964# qhasm: xmm8 ^= xmm0
3965# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
3966# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
3967pxor %xmm0,%xmm8
3968
3969# qhasm: xmm9 ^= xmm1
3970# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
3971# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
3972pxor %xmm1,%xmm9
3973
3974# qhasm: xmm10 ^= xmm4
3975# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
3976# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
3977pxor %xmm4,%xmm10
3978
3979# qhasm: xmm11 ^= xmm6
3980# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
3981# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
3982pxor %xmm6,%xmm11
3983
3984# qhasm: xmm12 ^= xmm3
3985# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
3986# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
3987pxor %xmm3,%xmm12
3988
3989# qhasm: xmm13 ^= xmm7
3990# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
3991# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
3992pxor %xmm7,%xmm13
3993
3994# qhasm: xmm14 ^= xmm2
3995# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
3996# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
3997pxor %xmm2,%xmm14
3998
3999# qhasm: xmm15 ^= xmm5
4000# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
4001# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
4002pxor %xmm5,%xmm15
4003
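# Annotation (not in the original source): taken together, the block
# above (pshufd $0x93 rotations, xor network, pshufd $0x4E qword swaps,
# final xors) appears to implement bitsliced MixColumns: rotating the
# dwords of a slice register rotates every AES column by one row, so the
# column mixing reduces to shuffles and xors over the eight slices.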
4004# qhasm: xmm8 ^= *(int128 *)(c + 384)
4005# asm 1: pxor 384(<c=int64#5),<xmm8=int6464#9
4006# asm 2: pxor 384(<c=%r8),<xmm8=%xmm8
4007pxor 384(%r8),%xmm8
4008
4009# qhasm: shuffle bytes of xmm8 by SR
4010# asm 1: pshufb SR,<xmm8=int6464#9
4011# asm 2: pshufb SR,<xmm8=%xmm8
4012pshufb SR,%xmm8
4013
4014# qhasm: xmm9 ^= *(int128 *)(c + 400)
4015# asm 1: pxor 400(<c=int64#5),<xmm9=int6464#10
4016# asm 2: pxor 400(<c=%r8),<xmm9=%xmm9
4017pxor 400(%r8),%xmm9
4018
4019# qhasm: shuffle bytes of xmm9 by SR
4020# asm 1: pshufb SR,<xmm9=int6464#10
4021# asm 2: pshufb SR,<xmm9=%xmm9
4022pshufb SR,%xmm9
4023
4024# qhasm: xmm10 ^= *(int128 *)(c + 416)
4025# asm 1: pxor 416(<c=int64#5),<xmm10=int6464#11
4026# asm 2: pxor 416(<c=%r8),<xmm10=%xmm10
4027pxor 416(%r8),%xmm10
4028
4029# qhasm: shuffle bytes of xmm10 by SR
4030# asm 1: pshufb SR,<xmm10=int6464#11
4031# asm 2: pshufb SR,<xmm10=%xmm10
4032pshufb SR,%xmm10
4033
4034# qhasm: xmm11 ^= *(int128 *)(c + 432)
4035# asm 1: pxor 432(<c=int64#5),<xmm11=int6464#12
4036# asm 2: pxor 432(<c=%r8),<xmm11=%xmm11
4037pxor 432(%r8),%xmm11
4038
4039# qhasm: shuffle bytes of xmm11 by SR
4040# asm 1: pshufb SR,<xmm11=int6464#12
4041# asm 2: pshufb SR,<xmm11=%xmm11
4042pshufb SR,%xmm11
4043
4044# qhasm: xmm12 ^= *(int128 *)(c + 448)
4045# asm 1: pxor 448(<c=int64#5),<xmm12=int6464#13
4046# asm 2: pxor 448(<c=%r8),<xmm12=%xmm12
4047pxor 448(%r8),%xmm12
4048
4049# qhasm: shuffle bytes of xmm12 by SR
4050# asm 1: pshufb SR,<xmm12=int6464#13
4051# asm 2: pshufb SR,<xmm12=%xmm12
4052pshufb SR,%xmm12
4053
4054# qhasm: xmm13 ^= *(int128 *)(c + 464)
4055# asm 1: pxor 464(<c=int64#5),<xmm13=int6464#14
4056# asm 2: pxor 464(<c=%r8),<xmm13=%xmm13
4057pxor 464(%r8),%xmm13
4058
4059# qhasm: shuffle bytes of xmm13 by SR
4060# asm 1: pshufb SR,<xmm13=int6464#14
4061# asm 2: pshufb SR,<xmm13=%xmm13
4062pshufb SR,%xmm13
4063
4064# qhasm: xmm14 ^= *(int128 *)(c + 480)
4065# asm 1: pxor 480(<c=int64#5),<xmm14=int6464#15
4066# asm 2: pxor 480(<c=%r8),<xmm14=%xmm14
4067pxor 480(%r8),%xmm14
4068
4069# qhasm: shuffle bytes of xmm14 by SR
4070# asm 1: pshufb SR,<xmm14=int6464#15
4071# asm 2: pshufb SR,<xmm14=%xmm14
4072pshufb SR,%xmm14
4073
4074# qhasm: xmm15 ^= *(int128 *)(c + 496)
4075# asm 1: pxor 496(<c=int64#5),<xmm15=int6464#16
4076# asm 2: pxor 496(<c=%r8),<xmm15=%xmm15
4077pxor 496(%r8),%xmm15
4078
4079# qhasm: shuffle bytes of xmm15 by SR
4080# asm 1: pshufb SR,<xmm15=int6464#16
4081# asm 2: pshufb SR,<xmm15=%xmm15
4082pshufb SR,%xmm15
4083
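# Annotation (not in the original source): the eight pxor/pshufb pairs
# above add the bitsliced round key stored at c+384..c+496 (AddRoundKey)
# and then apply ShiftRows through the SR byte-shuffle constant. A rough
# C-style sketch of the same loop, with hypothetical names:
#
#   for (int i = 0; i < 8; i++) {          /* one bit slice per xmm   */
#       state[i] ^= rkey[3][i];            /* pxor 384+16*i(%r8)      */
#       state[i]  = pshufb(state[i], SR);  /* ShiftRows on the slice  */
#   }
#
# (rkey[3] just names the 128-byte block at offset 384 = 3*128.)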
4084# qhasm: xmm13 ^= xmm14
4085# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
4086# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
4087pxor %xmm14,%xmm13
4088
4089# qhasm: xmm10 ^= xmm9
4090# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
4091# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
4092pxor %xmm9,%xmm10
4093
4094# qhasm: xmm13 ^= xmm8
4095# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
4096# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
4097pxor %xmm8,%xmm13
4098
4099# qhasm: xmm14 ^= xmm10
4100# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
4101# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
4102pxor %xmm10,%xmm14
4103
4104# qhasm: xmm11 ^= xmm8
4105# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
4106# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
4107pxor %xmm8,%xmm11
4108
4109# qhasm: xmm14 ^= xmm11
4110# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
4111# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
4112pxor %xmm11,%xmm14
4113
4114# qhasm: xmm11 ^= xmm15
4115# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
4116# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
4117pxor %xmm15,%xmm11
4118
4119# qhasm: xmm11 ^= xmm12
4120# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
4121# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
4122pxor %xmm12,%xmm11
4123
4124# qhasm: xmm15 ^= xmm13
4125# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
4126# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
4127pxor %xmm13,%xmm15
4128
4129# qhasm: xmm11 ^= xmm9
4130# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
4131# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
4132pxor %xmm9,%xmm11
4133
4134# qhasm: xmm12 ^= xmm13
4135# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
4136# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
4137pxor %xmm13,%xmm12
4138
4139# qhasm: xmm10 ^= xmm15
4140# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
4141# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
4142pxor %xmm15,%xmm10
4143
4144# qhasm: xmm9 ^= xmm13
4145# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
4146# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
4147pxor %xmm13,%xmm9
4148
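# Annotation (not in the original source): the pxor chain above looks
# like the linear input transform of the bitsliced S-box; the
# movdqa/pand/por/pxor sequence that follows is most likely its nonlinear
# core, evaluating the GF(2^8) inversion as a boolean circuit over the
# eight slice registers.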
4149# qhasm: xmm3 = xmm15
4150# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
4151# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
4152movdqa %xmm15,%xmm0
4153
4154# qhasm: xmm2 = xmm9
4155# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
4156# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
4157movdqa %xmm9,%xmm1
4158
4159# qhasm: xmm1 = xmm13
4160# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
4161# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
4162movdqa %xmm13,%xmm2
4163
4164# qhasm: xmm5 = xmm10
4165# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
4166# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
4167movdqa %xmm10,%xmm3
4168
4169# qhasm: xmm4 = xmm14
4170# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
4171# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
4172movdqa %xmm14,%xmm4
4173
4174# qhasm: xmm3 ^= xmm12
4175# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
4176# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
4177pxor %xmm12,%xmm0
4178
4179# qhasm: xmm2 ^= xmm10
4180# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
4181# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
4182pxor %xmm10,%xmm1
4183
4184# qhasm: xmm1 ^= xmm11
4185# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
4186# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
4187pxor %xmm11,%xmm2
4188
4189# qhasm: xmm5 ^= xmm12
4190# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
4191# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
4192pxor %xmm12,%xmm3
4193
4194# qhasm: xmm4 ^= xmm8
4195# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
4196# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
4197pxor %xmm8,%xmm4
4198
4199# qhasm: xmm6 = xmm3
4200# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
4201# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
4202movdqa %xmm0,%xmm5
4203
4204# qhasm: xmm0 = xmm2
4205# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
4206# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
4207movdqa %xmm1,%xmm6
4208
4209# qhasm: xmm7 = xmm3
4210# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
4211# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
4212movdqa %xmm0,%xmm7
4213
4214# qhasm: xmm2 |= xmm1
4215# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
4216# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
4217por %xmm2,%xmm1
4218
4219# qhasm: xmm3 |= xmm4
4220# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
4221# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
4222por %xmm4,%xmm0
4223
4224# qhasm: xmm7 ^= xmm0
4225# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
4226# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
4227pxor %xmm6,%xmm7
4228
4229# qhasm: xmm6 &= xmm4
4230# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
4231# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
4232pand %xmm4,%xmm5
4233
4234# qhasm: xmm0 &= xmm1
4235# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
4236# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
4237pand %xmm2,%xmm6
4238
4239# qhasm: xmm4 ^= xmm1
4240# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
4241# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
4242pxor %xmm2,%xmm4
4243
4244# qhasm: xmm7 &= xmm4
4245# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
4246# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
4247pand %xmm4,%xmm7
4248
4249# qhasm: xmm4 = xmm11
4250# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
4251# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
4252movdqa %xmm11,%xmm2
4253
4254# qhasm: xmm4 ^= xmm8
4255# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
4256# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
4257pxor %xmm8,%xmm2
4258
4259# qhasm: xmm5 &= xmm4
4260# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
4261# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
4262pand %xmm2,%xmm3
4263
4264# qhasm: xmm3 ^= xmm5
4265# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
4266# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
4267pxor %xmm3,%xmm0
4268
4269# qhasm: xmm2 ^= xmm5
4270# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
4271# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
4272pxor %xmm3,%xmm1
4273
4274# qhasm: xmm5 = xmm15
4275# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
4276# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
4277movdqa %xmm15,%xmm2
4278
4279# qhasm: xmm5 ^= xmm9
4280# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
4281# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
4282pxor %xmm9,%xmm2
4283
4284# qhasm: xmm4 = xmm13
4285# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
4286# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
4287movdqa %xmm13,%xmm3
4288
4289# qhasm: xmm1 = xmm5
4290# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
4291# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
4292movdqa %xmm2,%xmm4
4293
4294# qhasm: xmm4 ^= xmm14
4295# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
4296# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
4297pxor %xmm14,%xmm3
4298
4299# qhasm: xmm1 |= xmm4
4300# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
4301# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
4302por %xmm3,%xmm4
4303
4304# qhasm: xmm5 &= xmm4
4305# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
4306# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
4307pand %xmm3,%xmm2
4308
4309# qhasm: xmm0 ^= xmm5
4310# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
4311# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
4312pxor %xmm2,%xmm6
4313
4314# qhasm: xmm3 ^= xmm7
4315# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
4316# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
4317pxor %xmm7,%xmm0
4318
4319# qhasm: xmm2 ^= xmm6
4320# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
4321# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
4322pxor %xmm5,%xmm1
4323
4324# qhasm: xmm1 ^= xmm7
4325# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
4326# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
4327pxor %xmm7,%xmm4
4328
4329# qhasm: xmm0 ^= xmm6
4330# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
4331# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
4332pxor %xmm5,%xmm6
4333
4334# qhasm: xmm1 ^= xmm6
4335# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
4336# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
4337pxor %xmm5,%xmm4
4338
4339# qhasm: xmm4 = xmm10
4340# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
4341# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
4342movdqa %xmm10,%xmm2
4343
4344# qhasm: xmm5 = xmm12
4345# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
4346# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
4347movdqa %xmm12,%xmm3
4348
4349# qhasm: xmm6 = xmm9
4350# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
4351# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
4352movdqa %xmm9,%xmm5
4353
4354# qhasm: xmm7 = xmm15
4355# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
4356# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
4357movdqa %xmm15,%xmm7
4358
4359# qhasm: xmm4 &= xmm11
4360# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
4361# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
4362pand %xmm11,%xmm2
4363
4364# qhasm: xmm5 &= xmm8
4365# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
4366# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
4367pand %xmm8,%xmm3
4368
4369# qhasm: xmm6 &= xmm13
4370# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
4371# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
4372pand %xmm13,%xmm5
4373
4374# qhasm: xmm7 |= xmm14
4375# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
4376# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
4377por %xmm14,%xmm7
4378
4379# qhasm: xmm3 ^= xmm4
4380# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
4381# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
4382pxor %xmm2,%xmm0
4383
4384# qhasm: xmm2 ^= xmm5
4385# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
4386# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
4387pxor %xmm3,%xmm1
4388
4389# qhasm: xmm1 ^= xmm6
4390# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
4391# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
4392pxor %xmm5,%xmm4
4393
4394# qhasm: xmm0 ^= xmm7
4395# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
4396# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
4397pxor %xmm7,%xmm6
4398
4399# qhasm: xmm4 = xmm3
4400# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
4401# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
4402movdqa %xmm0,%xmm2
4403
4404# qhasm: xmm4 ^= xmm2
4405# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
4406# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
4407pxor %xmm1,%xmm2
4408
4409# qhasm: xmm3 &= xmm1
4410# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
4411# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
4412pand %xmm4,%xmm0
4413
4414# qhasm: xmm6 = xmm0
4415# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
4416# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
4417movdqa %xmm6,%xmm3
4418
4419# qhasm: xmm6 ^= xmm3
4420# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
4421# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
4422pxor %xmm0,%xmm3
4423
4424# qhasm: xmm7 = xmm4
4425# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
4426# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
4427movdqa %xmm2,%xmm5
4428
4429# qhasm: xmm7 &= xmm6
4430# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
4431# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
4432pand %xmm3,%xmm5
4433
4434# qhasm: xmm7 ^= xmm2
4435# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
4436# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
4437pxor %xmm1,%xmm5
4438
4439# qhasm: xmm5 = xmm1
4440# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
4441# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
4442movdqa %xmm4,%xmm7
4443
4444# qhasm: xmm5 ^= xmm0
4445# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
4446# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
4447pxor %xmm6,%xmm7
4448
4449# qhasm: xmm3 ^= xmm2
4450# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
4451# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
4452pxor %xmm1,%xmm0
4453
4454# qhasm: xmm5 &= xmm3
4455# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
4456# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
4457pand %xmm0,%xmm7
4458
4459# qhasm: xmm5 ^= xmm0
4460# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
4461# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
4462pxor %xmm6,%xmm7
4463
4464# qhasm: xmm1 ^= xmm5
4465# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
4466# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
4467pxor %xmm7,%xmm4
4468
4469# qhasm: xmm2 = xmm6
4470# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
4471# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
4472movdqa %xmm3,%xmm0
4473
4474# qhasm: xmm2 ^= xmm5
4475# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
4476# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
4477pxor %xmm7,%xmm0
4478
4479# qhasm: xmm2 &= xmm0
4480# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
4481# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
4482pand %xmm6,%xmm0
4483
4484# qhasm: xmm1 ^= xmm2
4485# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
4486# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
4487pxor %xmm0,%xmm4
4488
4489# qhasm: xmm6 ^= xmm2
4490# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
4491# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
4492pxor %xmm0,%xmm3
4493
4494# qhasm: xmm6 &= xmm7
4495# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
4496# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
4497pand %xmm5,%xmm3
4498
4499# qhasm: xmm6 ^= xmm4
4500# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
4501# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
4502pxor %xmm2,%xmm3
4503
4504# qhasm: xmm4 = xmm14
4505# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
4506# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
4507movdqa %xmm14,%xmm0
4508
4509# qhasm: xmm0 = xmm13
4510# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
4511# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
4512movdqa %xmm13,%xmm1
4513
4514# qhasm: xmm2 = xmm7
4515# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
4516# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
4517movdqa %xmm5,%xmm2
4518
4519# qhasm: xmm2 ^= xmm6
4520# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
4521# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
4522pxor %xmm3,%xmm2
4523
4524# qhasm: xmm2 &= xmm14
4525# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
4526# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
4527pand %xmm14,%xmm2
4528
4529# qhasm: xmm14 ^= xmm13
4530# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
4531# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
4532pxor %xmm13,%xmm14
4533
4534# qhasm: xmm14 &= xmm6
4535# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
4536# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
4537pand %xmm3,%xmm14
4538
4539# qhasm: xmm13 &= xmm7
4540# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
4541# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
4542pand %xmm5,%xmm13
4543
4544# qhasm: xmm14 ^= xmm13
4545# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
4546# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
4547pxor %xmm13,%xmm14
4548
4549# qhasm: xmm13 ^= xmm2
4550# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
4551# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
4552pxor %xmm2,%xmm13
4553
4554# qhasm: xmm4 ^= xmm8
4555# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
4556# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
4557pxor %xmm8,%xmm0
4558
4559# qhasm: xmm0 ^= xmm11
4560# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
4561# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
4562pxor %xmm11,%xmm1
4563
4564# qhasm: xmm7 ^= xmm5
4565# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
4566# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
4567pxor %xmm7,%xmm5
4568
4569# qhasm: xmm6 ^= xmm1
4570# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
4571# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
4572pxor %xmm4,%xmm3
4573
4574# qhasm: xmm3 = xmm7
4575# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
4576# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
4577movdqa %xmm5,%xmm2
4578
4579# qhasm: xmm3 ^= xmm6
4580# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
4581# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
4582pxor %xmm3,%xmm2
4583
4584# qhasm: xmm3 &= xmm4
4585# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
4586# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
4587pand %xmm0,%xmm2
4588
4589# qhasm: xmm4 ^= xmm0
4590# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
4591# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
4592pxor %xmm1,%xmm0
4593
4594# qhasm: xmm4 &= xmm6
4595# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
4596# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
4597pand %xmm3,%xmm0
4598
4599# qhasm: xmm0 &= xmm7
4600# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
4601# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
4602pand %xmm5,%xmm1
4603
4604# qhasm: xmm0 ^= xmm4
4605# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
4606# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
4607pxor %xmm0,%xmm1
4608
4609# qhasm: xmm4 ^= xmm3
4610# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
4611# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
4612pxor %xmm2,%xmm0
4613
4614# qhasm: xmm2 = xmm5
4615# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
4616# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
4617movdqa %xmm7,%xmm2
4618
4619# qhasm: xmm2 ^= xmm1
4620# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
4621# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
4622pxor %xmm4,%xmm2
4623
4624# qhasm: xmm2 &= xmm8
4625# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
4626# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
4627pand %xmm8,%xmm2
4628
4629# qhasm: xmm8 ^= xmm11
4630# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
4631# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
4632pxor %xmm11,%xmm8
4633
4634# qhasm: xmm8 &= xmm1
4635# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
4636# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
4637pand %xmm4,%xmm8
4638
4639# qhasm: xmm11 &= xmm5
4640# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
4641# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
4642pand %xmm7,%xmm11
4643
4644# qhasm: xmm8 ^= xmm11
4645# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
4646# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
4647pxor %xmm11,%xmm8
4648
4649# qhasm: xmm11 ^= xmm2
4650# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
4651# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
4652pxor %xmm2,%xmm11
4653
4654# qhasm: xmm14 ^= xmm4
4655# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
4656# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
4657pxor %xmm0,%xmm14
4658
4659# qhasm: xmm8 ^= xmm4
4660# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
4661# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
4662pxor %xmm0,%xmm8
4663
4664# qhasm: xmm13 ^= xmm0
4665# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
4666# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
4667pxor %xmm1,%xmm13
4668
4669# qhasm: xmm11 ^= xmm0
4670# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
4671# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
4672pxor %xmm1,%xmm11
4673
4674# qhasm: xmm4 = xmm15
4675# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
4676# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
4677movdqa %xmm15,%xmm0
4678
4679# qhasm: xmm0 = xmm9
4680# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
4681# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
4682movdqa %xmm9,%xmm1
4683
4684# qhasm: xmm4 ^= xmm12
4685# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
4686# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
4687pxor %xmm12,%xmm0
4688
4689# qhasm: xmm0 ^= xmm10
4690# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
4691# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
4692pxor %xmm10,%xmm1
4693
4694# qhasm: xmm3 = xmm7
4695# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
4696# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
4697movdqa %xmm5,%xmm2
4698
4699# qhasm: xmm3 ^= xmm6
4700# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
4701# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
4702pxor %xmm3,%xmm2
4703
4704# qhasm: xmm3 &= xmm4
4705# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
4706# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
4707pand %xmm0,%xmm2
4708
4709# qhasm: xmm4 ^= xmm0
4710# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
4711# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
4712pxor %xmm1,%xmm0
4713
4714# qhasm: xmm4 &= xmm6
4715# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
4716# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
4717pand %xmm3,%xmm0
4718
4719# qhasm: xmm0 &= xmm7
4720# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
4721# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
4722pand %xmm5,%xmm1
4723
4724# qhasm: xmm0 ^= xmm4
4725# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
4726# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
4727pxor %xmm0,%xmm1
4728
4729# qhasm: xmm4 ^= xmm3
4730# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
4731# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
4732pxor %xmm2,%xmm0
4733
4734# qhasm: xmm2 = xmm5
4735# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
4736# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
4737movdqa %xmm7,%xmm2
4738
4739# qhasm: xmm2 ^= xmm1
4740# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
4741# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
4742pxor %xmm4,%xmm2
4743
4744# qhasm: xmm2 &= xmm12
4745# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
4746# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
4747pand %xmm12,%xmm2
4748
4749# qhasm: xmm12 ^= xmm10
4750# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
4751# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
4752pxor %xmm10,%xmm12
4753
4754# qhasm: xmm12 &= xmm1
4755# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
4756# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
4757pand %xmm4,%xmm12
4758
4759# qhasm: xmm10 &= xmm5
4760# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
4761# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
4762pand %xmm7,%xmm10
4763
4764# qhasm: xmm12 ^= xmm10
4765# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
4766# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
4767pxor %xmm10,%xmm12
4768
4769# qhasm: xmm10 ^= xmm2
4770# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
4771# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
4772pxor %xmm2,%xmm10
4773
4774# qhasm: xmm7 ^= xmm5
4775# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
4776# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
4777pxor %xmm7,%xmm5
4778
4779# qhasm: xmm6 ^= xmm1
4780# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
4781# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
4782pxor %xmm4,%xmm3
4783
4784# qhasm: xmm3 = xmm7
4785# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
4786# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
4787movdqa %xmm5,%xmm2
4788
4789# qhasm: xmm3 ^= xmm6
4790# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
4791# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
4792pxor %xmm3,%xmm2
4793
4794# qhasm: xmm3 &= xmm15
4795# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
4796# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
4797pand %xmm15,%xmm2
4798
4799# qhasm: xmm15 ^= xmm9
4800# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
4801# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
4802pxor %xmm9,%xmm15
4803
4804# qhasm: xmm15 &= xmm6
4805# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
4806# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
4807pand %xmm3,%xmm15
4808
4809# qhasm: xmm9 &= xmm7
4810# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
4811# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
4812pand %xmm5,%xmm9
4813
4814# qhasm: xmm15 ^= xmm9
4815# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
4816# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
4817pxor %xmm9,%xmm15
4818
4819# qhasm: xmm9 ^= xmm3
4820# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
4821# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
4822pxor %xmm2,%xmm9
4823
4824# qhasm: xmm15 ^= xmm4
4825# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
4826# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
4827pxor %xmm0,%xmm15
4828
4829# qhasm: xmm12 ^= xmm4
4830# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
4831# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
4832pxor %xmm0,%xmm12
4833
4834# qhasm: xmm9 ^= xmm0
4835# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
4836# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
4837pxor %xmm1,%xmm9
4838
4839# qhasm: xmm10 ^= xmm0
4840# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
4841# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
4842pxor %xmm1,%xmm10
4843
4844# qhasm: xmm15 ^= xmm8
4845# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
4846# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
4847pxor %xmm8,%xmm15
4848
4849# qhasm: xmm9 ^= xmm14
4850# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
4851# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
4852pxor %xmm14,%xmm9
4853
4854# qhasm: xmm12 ^= xmm15
4855# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
4856# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
4857pxor %xmm15,%xmm12
4858
4859# qhasm: xmm14 ^= xmm8
4860# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
4861# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
4862pxor %xmm8,%xmm14
4863
4864# qhasm: xmm8 ^= xmm9
4865# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
4866# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
4867pxor %xmm9,%xmm8
4868
4869# qhasm: xmm9 ^= xmm13
4870# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
4871# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
4872pxor %xmm13,%xmm9
4873
4874# qhasm: xmm13 ^= xmm10
4875# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
4876# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
4877pxor %xmm10,%xmm13
4878
4879# qhasm: xmm12 ^= xmm13
4880# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
4881# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
4882pxor %xmm13,%xmm12
4883
4884# qhasm: xmm10 ^= xmm11
4885# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
4886# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
4887pxor %xmm11,%xmm10
4888
4889# qhasm: xmm11 ^= xmm13
4890# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
4891# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
4892pxor %xmm13,%xmm11
4893
4894# qhasm: xmm14 ^= xmm11
4895# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
4896# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
4897pxor %xmm11,%xmm14
4898
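# Annotation (not in the original source): the S-box output transform
# appears to end here; the pshufd $0x93 block below starts MixColumns for
# the next round, following the same rotate/xor/swap/xor pattern as the
# earlier MixColumns block.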
4899# qhasm: xmm0 = shuffle dwords of xmm8 by 0x93
4900# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
4901# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
4902pshufd $0x93,%xmm8,%xmm0
4903
4904# qhasm: xmm1 = shuffle dwords of xmm9 by 0x93
4905# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
4906# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
4907pshufd $0x93,%xmm9,%xmm1
4908
4909# qhasm: xmm2 = shuffle dwords of xmm12 by 0x93
4910# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
4911# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
4912pshufd $0x93,%xmm12,%xmm2
4913
4914# qhasm: xmm3 = shuffle dwords of xmm14 by 0x93
4915# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
4916# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
4917pshufd $0x93,%xmm14,%xmm3
4918
4919# qhasm: xmm4 = shuffle dwords of xmm11 by 0x93
4920# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
4921# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
4922pshufd $0x93,%xmm11,%xmm4
4923
4924# qhasm: xmm5 = shuffle dwords of xmm15 by 0x93
4925# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
4926# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
4927pshufd $0x93,%xmm15,%xmm5
4928
4929# qhasm: xmm6 = shuffle dwords of xmm10 by 0x93
4930# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
4931# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
4932pshufd $0x93,%xmm10,%xmm6
4933
4934# qhasm: xmm7 = shuffle dwords of xmm13 by 0x93
4935# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
4936# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
4937pshufd $0x93,%xmm13,%xmm7
4938
4939# qhasm: xmm8 ^= xmm0
4940# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
4941# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
4942pxor %xmm0,%xmm8
4943
4944# qhasm: xmm9 ^= xmm1
4945# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
4946# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
4947pxor %xmm1,%xmm9
4948
4949# qhasm: xmm12 ^= xmm2
4950# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13
4951# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12
4952pxor %xmm2,%xmm12
4953
4954# qhasm: xmm14 ^= xmm3
4955# asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15
4956# asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14
4957pxor %xmm3,%xmm14
4958
4959# qhasm: xmm11 ^= xmm4
4960# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
4961# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
4962pxor %xmm4,%xmm11
4963
4964# qhasm: xmm15 ^= xmm5
4965# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
4966# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
4967pxor %xmm5,%xmm15
4968
4969# qhasm: xmm10 ^= xmm6
4970# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11
4971# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10
4972pxor %xmm6,%xmm10
4973
4974# qhasm: xmm13 ^= xmm7
4975# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
4976# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
4977pxor %xmm7,%xmm13
4978
4979# qhasm: xmm0 ^= xmm13
4980# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
4981# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
4982pxor %xmm13,%xmm0
4983
4984# qhasm: xmm1 ^= xmm8
4985# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
4986# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
4987pxor %xmm8,%xmm1
4988
4989# qhasm: xmm2 ^= xmm9
4990# asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3
4991# asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2
4992pxor %xmm9,%xmm2
4993
4994# qhasm: xmm1 ^= xmm13
4995# asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2
4996# asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1
4997pxor %xmm13,%xmm1
4998
4999# qhasm: xmm3 ^= xmm12
5000# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
5001# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
5002pxor %xmm12,%xmm3
5003
5004# qhasm: xmm4 ^= xmm14
5005# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
5006# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
5007pxor %xmm14,%xmm4
5008
5009# qhasm: xmm5 ^= xmm11
5010# asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6
5011# asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5
5012pxor %xmm11,%xmm5
5013
5014# qhasm: xmm3 ^= xmm13
5015# asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4
5016# asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3
5017pxor %xmm13,%xmm3
5018
5019# qhasm: xmm6 ^= xmm15
5020# asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7
5021# asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6
5022pxor %xmm15,%xmm6
5023
5024# qhasm: xmm7 ^= xmm10
5025# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
5026# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
5027pxor %xmm10,%xmm7
5028
5029# qhasm: xmm4 ^= xmm13
5030# asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5
5031# asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4
5032pxor %xmm13,%xmm4
5033
5034# qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E
5035# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
5036# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
5037pshufd $0x4E,%xmm8,%xmm8
5038
5039# qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E
5040# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
5041# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
5042pshufd $0x4E,%xmm9,%xmm9
5043
5044# qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E
5045# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
5046# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
5047pshufd $0x4E,%xmm12,%xmm12
5048
5049# qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E
5050# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
5051# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
5052pshufd $0x4E,%xmm14,%xmm14
5053
5054# qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E
5055# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
5056# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
5057pshufd $0x4E,%xmm11,%xmm11
5058
5059# qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E
5060# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
5061# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
5062pshufd $0x4E,%xmm15,%xmm15
5063
5064# qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E
5065# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
5066# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
5067pshufd $0x4E,%xmm10,%xmm10
5068
5069# qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E
5070# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
5071# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
5072pshufd $0x4E,%xmm13,%xmm13
5073
5074# qhasm: xmm0 ^= xmm8
5075# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
5076# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
5077pxor %xmm8,%xmm0
5078
5079# qhasm: xmm1 ^= xmm9
5080# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
5081# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
5082pxor %xmm9,%xmm1
5083
5084# qhasm: xmm2 ^= xmm12
5085# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
5086# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
5087pxor %xmm12,%xmm2
5088
5089# qhasm: xmm3 ^= xmm14
5090# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
5091# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
5092pxor %xmm14,%xmm3
5093
5094# qhasm: xmm4 ^= xmm11
5095# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
5096# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
5097pxor %xmm11,%xmm4
5098
5099# qhasm: xmm5 ^= xmm15
5100# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
5101# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
5102pxor %xmm15,%xmm5
5103
5104# qhasm: xmm6 ^= xmm10
5105# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
5106# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
5107pxor %xmm10,%xmm6
5108
5109# qhasm: xmm7 ^= xmm13
5110# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
5111# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
5112pxor %xmm13,%xmm7
5113
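# Annotation (not in the original source): the pxor/pshufb pairs below
# add the next bitsliced round key (offsets 512..624, i.e. the 128-byte
# block at 512 = 4*128) and apply ShiftRows, mirroring the c+384 block
# earlier in this section.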
5114# qhasm: xmm0 ^= *(int128 *)(c + 512)
5115# asm 1: pxor 512(<c=int64#5),<xmm0=int6464#1
5116# asm 2: pxor 512(<c=%r8),<xmm0=%xmm0
5117pxor 512(%r8),%xmm0
5118
5119# qhasm: shuffle bytes of xmm0 by SR
5120# asm 1: pshufb SR,<xmm0=int6464#1
5121# asm 2: pshufb SR,<xmm0=%xmm0
5122pshufb SR,%xmm0
5123
5124# qhasm: xmm1 ^= *(int128 *)(c + 528)
5125# asm 1: pxor 528(<c=int64#5),<xmm1=int6464#2
5126# asm 2: pxor 528(<c=%r8),<xmm1=%xmm1
5127pxor 528(%r8),%xmm1
5128
5129# qhasm: shuffle bytes of xmm1 by SR
5130# asm 1: pshufb SR,<xmm1=int6464#2
5131# asm 2: pshufb SR,<xmm1=%xmm1
5132pshufb SR,%xmm1
5133
5134# qhasm: xmm2 ^= *(int128 *)(c + 544)
5135# asm 1: pxor 544(<c=int64#5),<xmm2=int6464#3
5136# asm 2: pxor 544(<c=%r8),<xmm2=%xmm2
5137pxor 544(%r8),%xmm2
5138
5139# qhasm: shuffle bytes of xmm2 by SR
5140# asm 1: pshufb SR,<xmm2=int6464#3
5141# asm 2: pshufb SR,<xmm2=%xmm2
5142pshufb SR,%xmm2
5143
5144# qhasm: xmm3 ^= *(int128 *)(c + 560)
5145# asm 1: pxor 560(<c=int64#5),<xmm3=int6464#4
5146# asm 2: pxor 560(<c=%r8),<xmm3=%xmm3
5147pxor 560(%r8),%xmm3
5148
5149# qhasm: shuffle bytes of xmm3 by SR
5150# asm 1: pshufb SR,<xmm3=int6464#4
5151# asm 2: pshufb SR,<xmm3=%xmm3
5152pshufb SR,%xmm3
5153
5154# qhasm: xmm4 ^= *(int128 *)(c + 576)
5155# asm 1: pxor 576(<c=int64#5),<xmm4=int6464#5
5156# asm 2: pxor 576(<c=%r8),<xmm4=%xmm4
5157pxor 576(%r8),%xmm4
5158
5159# qhasm: shuffle bytes of xmm4 by SR
5160# asm 1: pshufb SR,<xmm4=int6464#5
5161# asm 2: pshufb SR,<xmm4=%xmm4
5162pshufb SR,%xmm4
5163
5164# qhasm: xmm5 ^= *(int128 *)(c + 592)
5165# asm 1: pxor 592(<c=int64#5),<xmm5=int6464#6
5166# asm 2: pxor 592(<c=%r8),<xmm5=%xmm5
5167pxor 592(%r8),%xmm5
5168
5169# qhasm: shuffle bytes of xmm5 by SR
5170# asm 1: pshufb SR,<xmm5=int6464#6
5171# asm 2: pshufb SR,<xmm5=%xmm5
5172pshufb SR,%xmm5
5173
5174# qhasm: xmm6 ^= *(int128 *)(c + 608)
5175# asm 1: pxor 608(<c=int64#5),<xmm6=int6464#7
5176# asm 2: pxor 608(<c=%r8),<xmm6=%xmm6
5177pxor 608(%r8),%xmm6
5178
5179# qhasm: shuffle bytes of xmm6 by SR
5180# asm 1: pshufb SR,<xmm6=int6464#7
5181# asm 2: pshufb SR,<xmm6=%xmm6
5182pshufb SR,%xmm6
5183
5184# qhasm: xmm7 ^= *(int128 *)(c + 624)
5185# asm 1: pxor 624(<c=int64#5),<xmm7=int6464#8
5186# asm 2: pxor 624(<c=%r8),<xmm7=%xmm7
5187pxor 624(%r8),%xmm7
5188
5189# qhasm: shuffle bytes of xmm7 by SR
5190# asm 1: pshufb SR,<xmm7=int6464#8
5191# asm 2: pshufb SR,<xmm7=%xmm7
5192pshufb SR,%xmm7
5193
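# Annotation (not in the original source): with the round key added and
# rows shifted, the xor network below appears to begin the next SubBytes,
# starting again from the S-box input linear transform.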
5194# qhasm: xmm5 ^= xmm6
5195# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
5196# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
5197pxor %xmm6,%xmm5
5198
5199# qhasm: xmm2 ^= xmm1
5200# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
5201# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
5202pxor %xmm1,%xmm2
5203
5204# qhasm: xmm5 ^= xmm0
5205# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
5206# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
5207pxor %xmm0,%xmm5
5208
5209# qhasm: xmm6 ^= xmm2
5210# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
5211# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
5212pxor %xmm2,%xmm6
5213
5214# qhasm: xmm3 ^= xmm0
5215# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
5216# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
5217pxor %xmm0,%xmm3
5218
5219# qhasm: xmm6 ^= xmm3
5220# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
5221# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
5222pxor %xmm3,%xmm6
5223
5224# qhasm: xmm3 ^= xmm7
5225# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
5226# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
5227pxor %xmm7,%xmm3
5228
5229# qhasm: xmm3 ^= xmm4
5230# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
5231# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
5232pxor %xmm4,%xmm3
5233
5234# qhasm: xmm7 ^= xmm5
5235# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
5236# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
5237pxor %xmm5,%xmm7
5238
5239# qhasm: xmm3 ^= xmm1
5240# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
5241# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
5242pxor %xmm1,%xmm3
5243
5244# qhasm: xmm4 ^= xmm5
5245# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
5246# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
5247pxor %xmm5,%xmm4
5248
5249# qhasm: xmm2 ^= xmm7
5250# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
5251# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
5252pxor %xmm7,%xmm2
5253
5254# qhasm: xmm1 ^= xmm5
5255# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
5256# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
5257pxor %xmm5,%xmm1
5258
5259# qhasm: xmm11 = xmm7
5260# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
5261# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
5262movdqa %xmm7,%xmm8
5263
5264# qhasm: xmm10 = xmm1
5265# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
5266# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
5267movdqa %xmm1,%xmm9
5268
5269# qhasm: xmm9 = xmm5
5270# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
5271# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
5272movdqa %xmm5,%xmm10
5273
5274# qhasm: xmm13 = xmm2
5275# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
5276# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
5277movdqa %xmm2,%xmm11
5278
5279# qhasm: xmm12 = xmm6
5280# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
5281# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
5282movdqa %xmm6,%xmm12
5283
5284# qhasm: xmm11 ^= xmm4
5285# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
5286# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
5287pxor %xmm4,%xmm8
5288
5289# qhasm: xmm10 ^= xmm2
5290# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
5291# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
5292pxor %xmm2,%xmm9
5293
5294# qhasm: xmm9 ^= xmm3
5295# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
5296# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
5297pxor %xmm3,%xmm10
5298
5299# qhasm: xmm13 ^= xmm4
5300# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
5301# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
5302pxor %xmm4,%xmm11
5303
5304# qhasm: xmm12 ^= xmm0
5305# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
5306# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
5307pxor %xmm0,%xmm12
5308
5309# qhasm: xmm14 = xmm11
5310# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
5311# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
5312movdqa %xmm8,%xmm13
5313
5314# qhasm: xmm8 = xmm10
5315# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
5316# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
5317movdqa %xmm9,%xmm14
5318
5319# qhasm: xmm15 = xmm11
5320# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
5321# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
5322movdqa %xmm8,%xmm15
5323
5324# qhasm: xmm10 |= xmm9
5325# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
5326# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
5327por %xmm10,%xmm9
5328
5329# qhasm: xmm11 |= xmm12
5330# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
5331# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
5332por %xmm12,%xmm8
5333
5334# qhasm: xmm15 ^= xmm8
5335# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
5336# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
5337pxor %xmm14,%xmm15
5338
5339# qhasm: xmm14 &= xmm12
5340# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
5341# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
5342pand %xmm12,%xmm13
5343
5344# qhasm: xmm8 &= xmm9
5345# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
5346# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
5347pand %xmm10,%xmm14
5348
5349# qhasm: xmm12 ^= xmm9
5350# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
5351# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
5352pxor %xmm10,%xmm12
5353
5354# qhasm: xmm15 &= xmm12
5355# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
5356# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
5357pand %xmm12,%xmm15
5358
5359# qhasm: xmm12 = xmm3
5360# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
5361# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
5362movdqa %xmm3,%xmm10
5363
5364# qhasm: xmm12 ^= xmm0
5365# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
5366# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
5367pxor %xmm0,%xmm10
5368
5369# qhasm: xmm13 &= xmm12
5370# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
5371# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
5372pand %xmm10,%xmm11
5373
5374# qhasm: xmm11 ^= xmm13
5375# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
5376# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
5377pxor %xmm11,%xmm8
5378
5379# qhasm: xmm10 ^= xmm13
5380# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
5381# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
5382pxor %xmm11,%xmm9
5383
5384# qhasm: xmm13 = xmm7
5385# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
5386# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
5387movdqa %xmm7,%xmm10
5388
5389# qhasm: xmm13 ^= xmm1
5390# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
5391# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
5392pxor %xmm1,%xmm10
5393
5394# qhasm: xmm12 = xmm5
5395# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
5396# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
5397movdqa %xmm5,%xmm11
5398
5399# qhasm: xmm9 = xmm13
5400# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
5401# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
5402movdqa %xmm10,%xmm12
5403
5404# qhasm: xmm12 ^= xmm6
5405# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
5406# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
5407pxor %xmm6,%xmm11
5408
5409# qhasm: xmm9 |= xmm12
5410# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
5411# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
5412por %xmm11,%xmm12
5413
5414# qhasm: xmm13 &= xmm12
5415# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
5416# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
5417pand %xmm11,%xmm10
5418
5419# qhasm: xmm8 ^= xmm13
5420# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
5421# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
5422pxor %xmm10,%xmm14
5423
5424# qhasm: xmm11 ^= xmm15
5425# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
5426# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
5427pxor %xmm15,%xmm8
5428
5429# qhasm: xmm10 ^= xmm14
5430# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
5431# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
5432pxor %xmm13,%xmm9
5433
5434# qhasm: xmm9 ^= xmm15
5435# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
5436# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
5437pxor %xmm15,%xmm12
5438
5439# qhasm: xmm8 ^= xmm14
5440# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
5441# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
5442pxor %xmm13,%xmm14
5443
5444# qhasm: xmm9 ^= xmm14
5445# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
5446# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
5447pxor %xmm13,%xmm12
5448
5449# qhasm: xmm12 = xmm2
5450# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
5451# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
5452movdqa %xmm2,%xmm10
5453
5454# qhasm: xmm13 = xmm4
5455# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
5456# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
5457movdqa %xmm4,%xmm11
5458
5459# qhasm: xmm14 = xmm1
5460# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
5461# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
5462movdqa %xmm1,%xmm13
5463
5464# qhasm: xmm15 = xmm7
5465# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
5466# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
5467movdqa %xmm7,%xmm15
5468
5469# qhasm: xmm12 &= xmm3
5470# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
5471# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
5472pand %xmm3,%xmm10
5473
5474# qhasm: xmm13 &= xmm0
5475# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
5476# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
5477pand %xmm0,%xmm11
5478
5479# qhasm: xmm14 &= xmm5
5480# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
5481# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
5482pand %xmm5,%xmm13
5483
5484# qhasm: xmm15 |= xmm6
5485# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
5486# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
5487por %xmm6,%xmm15
5488
5489# qhasm: xmm11 ^= xmm12
5490# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
5491# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
5492pxor %xmm10,%xmm8
5493
5494# qhasm: xmm10 ^= xmm13
5495# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
5496# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
5497pxor %xmm11,%xmm9
5498
5499# qhasm: xmm9 ^= xmm14
5500# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
5501# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
5502pxor %xmm13,%xmm12
5503
5504# qhasm: xmm8 ^= xmm15
5505# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
5506# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
5507pxor %xmm15,%xmm14
5508
5509# qhasm: xmm12 = xmm11
5510# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
5511# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
5512movdqa %xmm8,%xmm10
5513
5514# qhasm: xmm12 ^= xmm10
5515# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
5516# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
5517pxor %xmm9,%xmm10
5518
5519# qhasm: xmm11 &= xmm9
5520# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
5521# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
5522pand %xmm12,%xmm8
5523
5524# qhasm: xmm14 = xmm8
5525# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
5526# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
5527movdqa %xmm14,%xmm11
5528
5529# qhasm: xmm14 ^= xmm11
5530# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
5531# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
5532pxor %xmm8,%xmm11
5533
5534# qhasm: xmm15 = xmm12
5535# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
5536# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
5537movdqa %xmm10,%xmm13
5538
5539# qhasm: xmm15 &= xmm14
5540# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
5541# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
5542pand %xmm11,%xmm13
5543
5544# qhasm: xmm15 ^= xmm10
5545# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
5546# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
5547pxor %xmm9,%xmm13
5548
5549# qhasm: xmm13 = xmm9
5550# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
5551# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
5552movdqa %xmm12,%xmm15
5553
5554# qhasm: xmm13 ^= xmm8
5555# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
5556# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
5557pxor %xmm14,%xmm15
5558
5559# qhasm: xmm11 ^= xmm10
5560# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
5561# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
5562pxor %xmm9,%xmm8
5563
5564# qhasm: xmm13 &= xmm11
5565# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
5566# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
5567pand %xmm8,%xmm15
5568
5569# qhasm: xmm13 ^= xmm8
5570# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
5571# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
5572pxor %xmm14,%xmm15
5573
5574# qhasm: xmm9 ^= xmm13
5575# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
5576# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
5577pxor %xmm15,%xmm12
5578
5579# qhasm: xmm10 = xmm14
5580# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
5581# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
5582movdqa %xmm11,%xmm8
5583
5584# qhasm: xmm10 ^= xmm13
5585# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
5586# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
5587pxor %xmm15,%xmm8
5588
5589# qhasm: xmm10 &= xmm8
5590# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
5591# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
5592pand %xmm14,%xmm8
5593
5594# qhasm: xmm9 ^= xmm10
5595# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
5596# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
5597pxor %xmm8,%xmm12
5598
5599# qhasm: xmm14 ^= xmm10
5600# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
5601# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
5602pxor %xmm8,%xmm11
5603
5604# qhasm: xmm14 &= xmm15
5605# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
5606# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
5607pand %xmm13,%xmm11
5608
5609# qhasm: xmm14 ^= xmm12
5610# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
5611# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
5612pxor %xmm10,%xmm11
5613
5614# qhasm: xmm12 = xmm6
5615# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
5616# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
5617movdqa %xmm6,%xmm8
5618
5619# qhasm: xmm8 = xmm5
5620# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
5621# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
5622movdqa %xmm5,%xmm9
5623
5624# qhasm: xmm10 = xmm15
5625# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
5626# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
5627movdqa %xmm13,%xmm10
5628
5629# qhasm: xmm10 ^= xmm14
5630# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
5631# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
5632pxor %xmm11,%xmm10
5633
5634# qhasm: xmm10 &= xmm6
5635# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
5636# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
5637pand %xmm6,%xmm10
5638
5639# qhasm: xmm6 ^= xmm5
5640# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
5641# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
5642pxor %xmm5,%xmm6
5643
5644# qhasm: xmm6 &= xmm14
5645# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
5646# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
5647pand %xmm11,%xmm6
5648
5649# qhasm: xmm5 &= xmm15
5650# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
5651# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
5652pand %xmm13,%xmm5
5653
5654# qhasm: xmm6 ^= xmm5
5655# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
5656# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
5657pxor %xmm5,%xmm6
5658
5659# qhasm: xmm5 ^= xmm10
5660# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
5661# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
5662pxor %xmm10,%xmm5
5663
5664# qhasm: xmm12 ^= xmm0
5665# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
5666# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
5667pxor %xmm0,%xmm8
5668
5669# qhasm: xmm8 ^= xmm3
5670# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
5671# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
5672pxor %xmm3,%xmm9
5673
5674# qhasm: xmm15 ^= xmm13
5675# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
5676# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
5677pxor %xmm15,%xmm13
5678
5679# qhasm: xmm14 ^= xmm9
5680# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
5681# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
5682pxor %xmm12,%xmm11
5683
5684# qhasm: xmm11 = xmm15
5685# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5686# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5687movdqa %xmm13,%xmm10
5688
5689# qhasm: xmm11 ^= xmm14
5690# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5691# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5692pxor %xmm11,%xmm10
5693
5694# qhasm: xmm11 &= xmm12
5695# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
5696# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
5697pand %xmm8,%xmm10
5698
5699# qhasm: xmm12 ^= xmm8
5700# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
5701# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
5702pxor %xmm9,%xmm8
5703
5704# qhasm: xmm12 &= xmm14
5705# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
5706# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
5707pand %xmm11,%xmm8
5708
5709# qhasm: xmm8 &= xmm15
5710# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
5711# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
5712pand %xmm13,%xmm9
5713
5714# qhasm: xmm8 ^= xmm12
5715# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
5716# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
5717pxor %xmm8,%xmm9
5718
5719# qhasm: xmm12 ^= xmm11
5720# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
5721# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
5722pxor %xmm10,%xmm8
5723
5724# qhasm: xmm10 = xmm13
5725# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
5726# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
5727movdqa %xmm15,%xmm10
5728
5729# qhasm: xmm10 ^= xmm9
5730# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
5731# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
5732pxor %xmm12,%xmm10
5733
5734# qhasm: xmm10 &= xmm0
5735# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
5736# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
5737pand %xmm0,%xmm10
5738
5739# qhasm: xmm0 ^= xmm3
5740# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
5741# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
5742pxor %xmm3,%xmm0
5743
5744# qhasm: xmm0 &= xmm9
5745# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
5746# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
5747pand %xmm12,%xmm0
5748
5749# qhasm: xmm3 &= xmm13
5750# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
5751# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
5752pand %xmm15,%xmm3
5753
5754# qhasm: xmm0 ^= xmm3
5755# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
5756# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
5757pxor %xmm3,%xmm0
5758
5759# qhasm: xmm3 ^= xmm10
5760# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
5761# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
5762pxor %xmm10,%xmm3
5763
5764# qhasm: xmm6 ^= xmm12
5765# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
5766# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
5767pxor %xmm8,%xmm6
5768
5769# qhasm: xmm0 ^= xmm12
5770# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
5771# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
5772pxor %xmm8,%xmm0
5773
5774# qhasm: xmm5 ^= xmm8
5775# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
5776# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
5777pxor %xmm9,%xmm5
5778
5779# qhasm: xmm3 ^= xmm8
5780# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
5781# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
5782pxor %xmm9,%xmm3
5783
5784# qhasm: xmm12 = xmm7
5785# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
5786# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
5787movdqa %xmm7,%xmm8
5788
5789# qhasm: xmm8 = xmm1
5790# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
5791# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
5792movdqa %xmm1,%xmm9
5793
5794# qhasm: xmm12 ^= xmm4
5795# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
5796# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
5797pxor %xmm4,%xmm8
5798
5799# qhasm: xmm8 ^= xmm2
5800# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
5801# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
5802pxor %xmm2,%xmm9
5803
5804# qhasm: xmm11 = xmm15
5805# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5806# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5807movdqa %xmm13,%xmm10
5808
5809# qhasm: xmm11 ^= xmm14
5810# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5811# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5812pxor %xmm11,%xmm10
5813
5814# qhasm: xmm11 &= xmm12
5815# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
5816# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
5817pand %xmm8,%xmm10
5818
5819# qhasm: xmm12 ^= xmm8
5820# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
5821# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
5822pxor %xmm9,%xmm8
5823
5824# qhasm: xmm12 &= xmm14
5825# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
5826# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
5827pand %xmm11,%xmm8
5828
5829# qhasm: xmm8 &= xmm15
5830# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
5831# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
5832pand %xmm13,%xmm9
5833
5834# qhasm: xmm8 ^= xmm12
5835# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
5836# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
5837pxor %xmm8,%xmm9
5838
5839# qhasm: xmm12 ^= xmm11
5840# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
5841# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
5842pxor %xmm10,%xmm8
5843
5844# qhasm: xmm10 = xmm13
5845# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
5846# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
5847movdqa %xmm15,%xmm10
5848
5849# qhasm: xmm10 ^= xmm9
5850# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
5851# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
5852pxor %xmm12,%xmm10
5853
5854# qhasm: xmm10 &= xmm4
5855# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
5856# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
5857pand %xmm4,%xmm10
5858
5859# qhasm: xmm4 ^= xmm2
5860# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
5861# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
5862pxor %xmm2,%xmm4
5863
5864# qhasm: xmm4 &= xmm9
5865# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
5866# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
5867pand %xmm12,%xmm4
5868
5869# qhasm: xmm2 &= xmm13
5870# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
5871# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
5872pand %xmm15,%xmm2
5873
5874# qhasm: xmm4 ^= xmm2
5875# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
5876# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
5877pxor %xmm2,%xmm4
5878
5879# qhasm: xmm2 ^= xmm10
5880# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
5881# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
5882pxor %xmm10,%xmm2
5883
5884# qhasm: xmm15 ^= xmm13
5885# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
5886# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
5887pxor %xmm15,%xmm13
5888
5889# qhasm: xmm14 ^= xmm9
5890# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
5891# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
5892pxor %xmm12,%xmm11
5893
5894# qhasm: xmm11 = xmm15
5895# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
5896# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
5897movdqa %xmm13,%xmm10
5898
5899# qhasm: xmm11 ^= xmm14
5900# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
5901# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
5902pxor %xmm11,%xmm10
5903
5904# qhasm: xmm11 &= xmm7
5905# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
5906# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
5907pand %xmm7,%xmm10
5908
5909# qhasm: xmm7 ^= xmm1
5910# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
5911# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
5912pxor %xmm1,%xmm7
5913
5914# qhasm: xmm7 &= xmm14
5915# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
5916# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
5917pand %xmm11,%xmm7
5918
5919# qhasm: xmm1 &= xmm15
5920# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
5921# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
5922pand %xmm13,%xmm1
5923
5924# qhasm: xmm7 ^= xmm1
5925# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
5926# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
5927pxor %xmm1,%xmm7
5928
5929# qhasm: xmm1 ^= xmm11
5930# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
5931# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
5932pxor %xmm10,%xmm1
5933
5934# qhasm: xmm7 ^= xmm12
5935# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
5936# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
5937pxor %xmm8,%xmm7
5938
5939# qhasm: xmm4 ^= xmm12
5940# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
5941# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
5942pxor %xmm8,%xmm4
5943
5944# qhasm: xmm1 ^= xmm8
5945# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
5946# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
5947pxor %xmm9,%xmm1
5948
5949# qhasm: xmm2 ^= xmm8
5950# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
5951# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
5952pxor %xmm9,%xmm2
5953
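# Output linear layer of the S-box: the run of pure pxor instructions below
# maps the inversion results back to the output bit ordering.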
5954# qhasm: xmm7 ^= xmm0
5955# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
5956# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
5957pxor %xmm0,%xmm7
5958
5959# qhasm: xmm1 ^= xmm6
5960# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
5961# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
5962pxor %xmm6,%xmm1
5963
5964# qhasm: xmm4 ^= xmm7
5965# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
5966# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
5967pxor %xmm7,%xmm4
5968
5969# qhasm: xmm6 ^= xmm0
5970# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
5971# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
5972pxor %xmm0,%xmm6
5973
5974# qhasm: xmm0 ^= xmm1
5975# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
5976# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
5977pxor %xmm1,%xmm0
5978
5979# qhasm: xmm1 ^= xmm5
5980# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
5981# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
5982pxor %xmm5,%xmm1
5983
5984# qhasm: xmm5 ^= xmm2
5985# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
5986# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
5987pxor %xmm2,%xmm5
5988
5989# qhasm: xmm4 ^= xmm5
5990# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
5991# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
5992pxor %xmm5,%xmm4
5993
5994# qhasm: xmm2 ^= xmm3
5995# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
5996# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
5997pxor %xmm3,%xmm2
5998
5999# qhasm: xmm3 ^= xmm5
6000# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
6001# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
6002pxor %xmm5,%xmm3
6003
6004# qhasm: xmm6 ^= xmm3
6005# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
6006# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
6007pxor %xmm3,%xmm6
6008
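# MixColumns, bitsliced: pshufd $0x93 rotates each slice by one 32-bit
# dword (one row position), and xoring rotated copies back in realizes the
# MixColumns matrix across all eight slices; pshufd $0x4E (swap of the
# 64-bit halves) completes the rotation pattern further down.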
6009# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
6010# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
6011# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
6012pshufd $0x93,%xmm0,%xmm8
6013
6014# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
6015# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
6016# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
6017pshufd $0x93,%xmm1,%xmm9
6018
6019# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
6020# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
6021# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
6022pshufd $0x93,%xmm4,%xmm10
6023
6024# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
6025# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
6026# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
6027pshufd $0x93,%xmm6,%xmm11
6028
6029# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
6030# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
6031# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
6032pshufd $0x93,%xmm3,%xmm12
6033
6034# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
6035# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
6036# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
6037pshufd $0x93,%xmm7,%xmm13
6038
6039# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
6040# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
6041# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
6042pshufd $0x93,%xmm2,%xmm14
6043
6044# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
6045# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
6046# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
6047pshufd $0x93,%xmm5,%xmm15
6048
6049# qhasm: xmm0 ^= xmm8
6050# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
6051# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
6052pxor %xmm8,%xmm0
6053
6054# qhasm: xmm1 ^= xmm9
6055# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
6056# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
6057pxor %xmm9,%xmm1
6058
6059# qhasm: xmm4 ^= xmm10
6060# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
6061# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
6062pxor %xmm10,%xmm4
6063
6064# qhasm: xmm6 ^= xmm11
6065# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
6066# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
6067pxor %xmm11,%xmm6
6068
6069# qhasm: xmm3 ^= xmm12
6070# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
6071# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
6072pxor %xmm12,%xmm3
6073
6074# qhasm: xmm7 ^= xmm13
6075# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
6076# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
6077pxor %xmm13,%xmm7
6078
6079# qhasm: xmm2 ^= xmm14
6080# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
6081# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
6082pxor %xmm14,%xmm2
6083
6084# qhasm: xmm5 ^= xmm15
6085# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
6086# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
6087pxor %xmm15,%xmm5
6088
6089# qhasm: xmm8 ^= xmm5
6090# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
6091# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
6092pxor %xmm5,%xmm8
6093
6094# qhasm: xmm9 ^= xmm0
6095# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
6096# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
6097pxor %xmm0,%xmm9
6098
6099# qhasm: xmm10 ^= xmm1
6100# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
6101# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
6102pxor %xmm1,%xmm10
6103
6104# qhasm: xmm9 ^= xmm5
6105# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
6106# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
6107pxor %xmm5,%xmm9
6108
6109# qhasm: xmm11 ^= xmm4
6110# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
6111# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
6112pxor %xmm4,%xmm11
6113
6114# qhasm: xmm12 ^= xmm6
6115# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
6116# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
6117pxor %xmm6,%xmm12
6118
6119# qhasm: xmm13 ^= xmm3
6120# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
6121# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
6122pxor %xmm3,%xmm13
6123
6124# qhasm: xmm11 ^= xmm5
6125# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
6126# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
6127pxor %xmm5,%xmm11
6128
6129# qhasm: xmm14 ^= xmm7
6130# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
6131# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
6132pxor %xmm7,%xmm14
6133
6134# qhasm: xmm15 ^= xmm2
6135# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
6136# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
6137pxor %xmm2,%xmm15
6138
6139# qhasm: xmm12 ^= xmm5
6140# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
6141# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
6142pxor %xmm5,%xmm12
6143
6144# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
6145# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
6146# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
6147pshufd $0x4E,%xmm0,%xmm0
6148
6149# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
6150# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
6151# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
6152pshufd $0x4E,%xmm1,%xmm1
6153
6154# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
6155# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
6156# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
6157pshufd $0x4E,%xmm4,%xmm4
6158
6159# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
6160# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
6161# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
6162pshufd $0x4E,%xmm6,%xmm6
6163
6164# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
6165# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
6166# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
6167pshufd $0x4E,%xmm3,%xmm3
6168
6169# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
6170# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
6171# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
6172pshufd $0x4E,%xmm7,%xmm7
6173
6174# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
6175# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
6176# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
6177pshufd $0x4E,%xmm2,%xmm2
6178
6179# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
6180# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
6181# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
6182pshufd $0x4E,%xmm5,%xmm5
6183
6184# qhasm: xmm8 ^= xmm0
6185# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
6186# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
6187pxor %xmm0,%xmm8
6188
6189# qhasm: xmm9 ^= xmm1
6190# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
6191# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
6192pxor %xmm1,%xmm9
6193
6194# qhasm: xmm10 ^= xmm4
6195# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
6196# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
6197pxor %xmm4,%xmm10
6198
6199# qhasm: xmm11 ^= xmm6
6200# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
6201# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
6202pxor %xmm6,%xmm11
6203
6204# qhasm: xmm12 ^= xmm3
6205# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
6206# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
6207pxor %xmm3,%xmm12
6208
6209# qhasm: xmm13 ^= xmm7
6210# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
6211# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
6212pxor %xmm7,%xmm13
6213
6214# qhasm: xmm14 ^= xmm2
6215# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
6216# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
6217pxor %xmm2,%xmm14
6218
6219# qhasm: xmm15 ^= xmm5
6220# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
6221# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
6222pxor %xmm5,%xmm15
6223
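# AddRoundKey and ShiftRows: xor in the bitsliced round key stored at
# c+640..c+752 (presumably round 5's key, assuming 8 slices of 16 bytes,
# i.e. 128 bytes, per round key), then pshufb with the SR constant
# rearranges bytes within each slice to perform ShiftRows.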
6224# qhasm: xmm8 ^= *(int128 *)(c + 640)
6225# asm 1: pxor 640(<c=int64#5),<xmm8=int6464#9
6226# asm 2: pxor 640(<c=%r8),<xmm8=%xmm8
6227pxor 640(%r8),%xmm8
6228
6229# qhasm: shuffle bytes of xmm8 by SR
6230# asm 1: pshufb SR,<xmm8=int6464#9
6231# asm 2: pshufb SR,<xmm8=%xmm8
6232pshufb SR,%xmm8
6233
6234# qhasm: xmm9 ^= *(int128 *)(c + 656)
6235# asm 1: pxor 656(<c=int64#5),<xmm9=int6464#10
6236# asm 2: pxor 656(<c=%r8),<xmm9=%xmm9
6237pxor 656(%r8),%xmm9
6238
6239# qhasm: shuffle bytes of xmm9 by SR
6240# asm 1: pshufb SR,<xmm9=int6464#10
6241# asm 2: pshufb SR,<xmm9=%xmm9
6242pshufb SR,%xmm9
6243
6244# qhasm: xmm10 ^= *(int128 *)(c + 672)
6245# asm 1: pxor 672(<c=int64#5),<xmm10=int6464#11
6246# asm 2: pxor 672(<c=%r8),<xmm10=%xmm10
6247pxor 672(%r8),%xmm10
6248
6249# qhasm: shuffle bytes of xmm10 by SR
6250# asm 1: pshufb SR,<xmm10=int6464#11
6251# asm 2: pshufb SR,<xmm10=%xmm10
6252pshufb SR,%xmm10
6253
6254# qhasm: xmm11 ^= *(int128 *)(c + 688)
6255# asm 1: pxor 688(<c=int64#5),<xmm11=int6464#12
6256# asm 2: pxor 688(<c=%r8),<xmm11=%xmm11
6257pxor 688(%r8),%xmm11
6258
6259# qhasm: shuffle bytes of xmm11 by SR
6260# asm 1: pshufb SR,<xmm11=int6464#12
6261# asm 2: pshufb SR,<xmm11=%xmm11
6262pshufb SR,%xmm11
6263
6264# qhasm: xmm12 ^= *(int128 *)(c + 704)
6265# asm 1: pxor 704(<c=int64#5),<xmm12=int6464#13
6266# asm 2: pxor 704(<c=%r8),<xmm12=%xmm12
6267pxor 704(%r8),%xmm12
6268
6269# qhasm: shuffle bytes of xmm12 by SR
6270# asm 1: pshufb SR,<xmm12=int6464#13
6271# asm 2: pshufb SR,<xmm12=%xmm12
6272pshufb SR,%xmm12
6273
6274# qhasm: xmm13 ^= *(int128 *)(c + 720)
6275# asm 1: pxor 720(<c=int64#5),<xmm13=int6464#14
6276# asm 2: pxor 720(<c=%r8),<xmm13=%xmm13
6277pxor 720(%r8),%xmm13
6278
6279# qhasm: shuffle bytes of xmm13 by SR
6280# asm 1: pshufb SR,<xmm13=int6464#14
6281# asm 2: pshufb SR,<xmm13=%xmm13
6282pshufb SR,%xmm13
6283
6284# qhasm: xmm14 ^= *(int128 *)(c + 736)
6285# asm 1: pxor 736(<c=int64#5),<xmm14=int6464#15
6286# asm 2: pxor 736(<c=%r8),<xmm14=%xmm14
6287pxor 736(%r8),%xmm14
6288
6289# qhasm: shuffle bytes of xmm14 by SR
6290# asm 1: pshufb SR,<xmm14=int6464#15
6291# asm 2: pshufb SR,<xmm14=%xmm14
6292pshufb SR,%xmm14
6293
6294# qhasm: xmm15 ^= *(int128 *)(c + 752)
6295# asm 1: pxor 752(<c=int64#5),<xmm15=int6464#16
6296# asm 2: pxor 752(<c=%r8),<xmm15=%xmm15
6297pxor 752(%r8),%xmm15
6298
6299# qhasm: shuffle bytes of xmm15 by SR
6300# asm 1: pshufb SR,<xmm15=int6464#16
6301# asm 2: pshufb SR,<xmm15=%xmm15
6302pshufb SR,%xmm15
6303
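# Next round's S-box begins: these initial xors are the input linear
# transform of the Boolean S-box circuit.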
6304# qhasm: xmm13 ^= xmm14
6305# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
6306# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
6307pxor %xmm14,%xmm13
6308
6309# qhasm: xmm10 ^= xmm9
6310# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
6311# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
6312pxor %xmm9,%xmm10
6313
6314# qhasm: xmm13 ^= xmm8
6315# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
6316# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
6317pxor %xmm8,%xmm13
6318
6319# qhasm: xmm14 ^= xmm10
6320# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
6321# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
6322pxor %xmm10,%xmm14
6323
6324# qhasm: xmm11 ^= xmm8
6325# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
6326# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
6327pxor %xmm8,%xmm11
6328
6329# qhasm: xmm14 ^= xmm11
6330# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
6331# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
6332pxor %xmm11,%xmm14
6333
6334# qhasm: xmm11 ^= xmm15
6335# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
6336# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
6337pxor %xmm15,%xmm11
6338
6339# qhasm: xmm11 ^= xmm12
6340# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
6341# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
6342pxor %xmm12,%xmm11
6343
6344# qhasm: xmm15 ^= xmm13
6345# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
6346# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
6347pxor %xmm13,%xmm15
6348
6349# qhasm: xmm11 ^= xmm9
6350# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
6351# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
6352pxor %xmm9,%xmm11
6353
6354# qhasm: xmm12 ^= xmm13
6355# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
6356# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
6357pxor %xmm13,%xmm12
6358
6359# qhasm: xmm10 ^= xmm15
6360# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
6361# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
6362pxor %xmm15,%xmm10
6363
6364# qhasm: xmm9 ^= xmm13
6365# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
6366# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
6367pxor %xmm13,%xmm9
6368
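# Nonlinear core of the S-box: the movdqa/pand/por mix below computes the
# shared AND/OR gates of the inversion in the tower-field representation.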
6369# qhasm: xmm3 = xmm15
6370# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
6371# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
6372movdqa %xmm15,%xmm0
6373
6374# qhasm: xmm2 = xmm9
6375# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
6376# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
6377movdqa %xmm9,%xmm1
6378
6379# qhasm: xmm1 = xmm13
6380# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
6381# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
6382movdqa %xmm13,%xmm2
6383
6384# qhasm: xmm5 = xmm10
6385# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
6386# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
6387movdqa %xmm10,%xmm3
6388
6389# qhasm: xmm4 = xmm14
6390# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
6391# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
6392movdqa %xmm14,%xmm4
6393
6394# qhasm: xmm3 ^= xmm12
6395# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
6396# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
6397pxor %xmm12,%xmm0
6398
6399# qhasm: xmm2 ^= xmm10
6400# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
6401# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
6402pxor %xmm10,%xmm1
6403
6404# qhasm: xmm1 ^= xmm11
6405# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
6406# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
6407pxor %xmm11,%xmm2
6408
6409# qhasm: xmm5 ^= xmm12
6410# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
6411# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
6412pxor %xmm12,%xmm3
6413
6414# qhasm: xmm4 ^= xmm8
6415# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
6416# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
6417pxor %xmm8,%xmm4
6418
6419# qhasm: xmm6 = xmm3
6420# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
6421# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
6422movdqa %xmm0,%xmm5
6423
6424# qhasm: xmm0 = xmm2
6425# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
6426# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
6427movdqa %xmm1,%xmm6
6428
6429# qhasm: xmm7 = xmm3
6430# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
6431# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
6432movdqa %xmm0,%xmm7
6433
6434# qhasm: xmm2 |= xmm1
6435# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
6436# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
6437por %xmm2,%xmm1
6438
6439# qhasm: xmm3 |= xmm4
6440# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
6441# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
6442por %xmm4,%xmm0
6443
6444# qhasm: xmm7 ^= xmm0
6445# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
6446# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
6447pxor %xmm6,%xmm7
6448
6449# qhasm: xmm6 &= xmm4
6450# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
6451# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
6452pand %xmm4,%xmm5
6453
6454# qhasm: xmm0 &= xmm1
6455# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
6456# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
6457pand %xmm2,%xmm6
6458
6459# qhasm: xmm4 ^= xmm1
6460# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
6461# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
6462pxor %xmm2,%xmm4
6463
6464# qhasm: xmm7 &= xmm4
6465# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
6466# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
6467pand %xmm4,%xmm7
6468
6469# qhasm: xmm4 = xmm11
6470# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
6471# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
6472movdqa %xmm11,%xmm2
6473
6474# qhasm: xmm4 ^= xmm8
6475# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
6476# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
6477pxor %xmm8,%xmm2
6478
6479# qhasm: xmm5 &= xmm4
6480# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
6481# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
6482pand %xmm2,%xmm3
6483
6484# qhasm: xmm3 ^= xmm5
6485# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
6486# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
6487pxor %xmm3,%xmm0
6488
6489# qhasm: xmm2 ^= xmm5
6490# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
6491# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
6492pxor %xmm3,%xmm1
6493
6494# qhasm: xmm5 = xmm15
6495# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
6496# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
6497movdqa %xmm15,%xmm2
6498
6499# qhasm: xmm5 ^= xmm9
6500# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
6501# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
6502pxor %xmm9,%xmm2
6503
6504# qhasm: xmm4 = xmm13
6505# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
6506# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
6507movdqa %xmm13,%xmm3
6508
6509# qhasm: xmm1 = xmm5
6510# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
6511# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
6512movdqa %xmm2,%xmm4
6513
6514# qhasm: xmm4 ^= xmm14
6515# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
6516# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
6517pxor %xmm14,%xmm3
6518
6519# qhasm: xmm1 |= xmm4
6520# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
6521# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
6522por %xmm3,%xmm4
6523
6524# qhasm: xmm5 &= xmm4
6525# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
6526# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
6527pand %xmm3,%xmm2
6528
6529# qhasm: xmm0 ^= xmm5
6530# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
6531# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
6532pxor %xmm2,%xmm6
6533
6534# qhasm: xmm3 ^= xmm7
6535# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
6536# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
6537pxor %xmm7,%xmm0
6538
6539# qhasm: xmm2 ^= xmm6
6540# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
6541# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
6542pxor %xmm5,%xmm1
6543
6544# qhasm: xmm1 ^= xmm7
6545# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
6546# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
6547pxor %xmm7,%xmm4
6548
6549# qhasm: xmm0 ^= xmm6
6550# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
6551# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
6552pxor %xmm5,%xmm6
6553
6554# qhasm: xmm1 ^= xmm6
6555# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
6556# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
6557pxor %xmm5,%xmm4
6558
6559# qhasm: xmm4 = xmm10
6560# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
6561# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
6562movdqa %xmm10,%xmm2
6563
6564# qhasm: xmm5 = xmm12
6565# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
6566# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
6567movdqa %xmm12,%xmm3
6568
6569# qhasm: xmm6 = xmm9
6570# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
6571# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
6572movdqa %xmm9,%xmm5
6573
6574# qhasm: xmm7 = xmm15
6575# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
6576# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
6577movdqa %xmm15,%xmm7
6578
6579# qhasm: xmm4 &= xmm11
6580# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
6581# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
6582pand %xmm11,%xmm2
6583
6584# qhasm: xmm5 &= xmm8
6585# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
6586# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
6587pand %xmm8,%xmm3
6588
6589# qhasm: xmm6 &= xmm13
6590# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
6591# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
6592pand %xmm13,%xmm5
6593
6594# qhasm: xmm7 |= xmm14
6595# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
6596# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
6597por %xmm14,%xmm7
6598
6599# qhasm: xmm3 ^= xmm4
6600# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
6601# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
6602pxor %xmm2,%xmm0
6603
6604# qhasm: xmm2 ^= xmm5
6605# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
6606# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
6607pxor %xmm3,%xmm1
6608
6609# qhasm: xmm1 ^= xmm6
6610# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
6611# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
6612pxor %xmm5,%xmm4
6613
6614# qhasm: xmm0 ^= xmm7
6615# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
6616# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
6617pxor %xmm7,%xmm6
6618
6619# qhasm: xmm4 = xmm3
6620# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
6621# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
6622movdqa %xmm0,%xmm2
6623
6624# qhasm: xmm4 ^= xmm2
6625# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
6626# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
6627pxor %xmm1,%xmm2
6628
6629# qhasm: xmm3 &= xmm1
6630# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
6631# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
6632pand %xmm4,%xmm0
6633
6634# qhasm: xmm6 = xmm0
6635# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
6636# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
6637movdqa %xmm6,%xmm3
6638
6639# qhasm: xmm6 ^= xmm3
6640# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
6641# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
6642pxor %xmm0,%xmm3
6643
6644# qhasm: xmm7 = xmm4
6645# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
6646# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
6647movdqa %xmm2,%xmm5
6648
6649# qhasm: xmm7 &= xmm6
6650# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
6651# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
6652pand %xmm3,%xmm5
6653
6654# qhasm: xmm7 ^= xmm2
6655# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
6656# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
6657pxor %xmm1,%xmm5
6658
6659# qhasm: xmm5 = xmm1
6660# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
6661# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
6662movdqa %xmm4,%xmm7
6663
6664# qhasm: xmm5 ^= xmm0
6665# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
6666# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
6667pxor %xmm6,%xmm7
6668
6669# qhasm: xmm3 ^= xmm2
6670# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
6671# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
6672pxor %xmm1,%xmm0
6673
6674# qhasm: xmm5 &= xmm3
6675# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
6676# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
6677pand %xmm0,%xmm7
6678
6679# qhasm: xmm5 ^= xmm0
6680# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
6681# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
6682pxor %xmm6,%xmm7
6683
6684# qhasm: xmm1 ^= xmm5
6685# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
6686# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
6687pxor %xmm7,%xmm4
6688
6689# qhasm: xmm2 = xmm6
6690# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
6691# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
6692movdqa %xmm3,%xmm0
6693
6694# qhasm: xmm2 ^= xmm5
6695# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
6696# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
6697pxor %xmm7,%xmm0
6698
6699# qhasm: xmm2 &= xmm0
6700# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
6701# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
6702pand %xmm6,%xmm0
6703
6704# qhasm: xmm1 ^= xmm2
6705# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
6706# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
6707pxor %xmm0,%xmm4
6708
6709# qhasm: xmm6 ^= xmm2
6710# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
6711# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
6712pxor %xmm0,%xmm3
6713
6714# qhasm: xmm6 &= xmm7
6715# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
6716# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
6717pand %xmm5,%xmm3
6718
6719# qhasm: xmm6 ^= xmm4
6720# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
6721# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
6722pxor %xmm2,%xmm3
6723
6724# qhasm: xmm4 = xmm14
6725# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
6726# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
6727movdqa %xmm14,%xmm0
6728
6729# qhasm: xmm0 = xmm13
6730# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
6731# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
6732movdqa %xmm13,%xmm1
6733
6734# qhasm: xmm2 = xmm7
6735# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
6736# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
6737movdqa %xmm5,%xmm2
6738
6739# qhasm: xmm2 ^= xmm6
6740# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
6741# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
6742pxor %xmm3,%xmm2
6743
6744# qhasm: xmm2 &= xmm14
6745# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
6746# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
6747pand %xmm14,%xmm2
6748
6749# qhasm: xmm14 ^= xmm13
6750# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
6751# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
6752pxor %xmm13,%xmm14
6753
6754# qhasm: xmm14 &= xmm6
6755# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
6756# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
6757pand %xmm3,%xmm14
6758
6759# qhasm: xmm13 &= xmm7
6760# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
6761# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
6762pand %xmm5,%xmm13
6763
6764# qhasm: xmm14 ^= xmm13
6765# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
6766# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
6767pxor %xmm13,%xmm14
6768
6769# qhasm: xmm13 ^= xmm2
6770# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
6771# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
6772pxor %xmm2,%xmm13
6773
6774# qhasm: xmm4 ^= xmm8
6775# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
6776# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
6777pxor %xmm8,%xmm0
6778
6779# qhasm: xmm0 ^= xmm11
6780# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
6781# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
6782pxor %xmm11,%xmm1
6783
6784# qhasm: xmm7 ^= xmm5
6785# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
6786# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
6787pxor %xmm7,%xmm5
6788
6789# qhasm: xmm6 ^= xmm1
6790# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
6791# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
6792pxor %xmm4,%xmm3
6793
6794# qhasm: xmm3 = xmm7
6795# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
6796# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
6797movdqa %xmm5,%xmm2
6798
6799# qhasm: xmm3 ^= xmm6
6800# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
6801# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
6802pxor %xmm3,%xmm2
6803
6804# qhasm: xmm3 &= xmm4
6805# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
6806# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
6807pand %xmm0,%xmm2
6808
6809# qhasm: xmm4 ^= xmm0
6810# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
6811# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
6812pxor %xmm1,%xmm0
6813
6814# qhasm: xmm4 &= xmm6
6815# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
6816# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
6817pand %xmm3,%xmm0
6818
6819# qhasm: xmm0 &= xmm7
6820# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
6821# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
6822pand %xmm5,%xmm1
6823
6824# qhasm: xmm0 ^= xmm4
6825# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
6826# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
6827pxor %xmm0,%xmm1
6828
6829# qhasm: xmm4 ^= xmm3
6830# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
6831# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
6832pxor %xmm2,%xmm0
6833
6834# qhasm: xmm2 = xmm5
6835# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
6836# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
6837movdqa %xmm7,%xmm2
6838
6839# qhasm: xmm2 ^= xmm1
6840# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
6841# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
6842pxor %xmm4,%xmm2
6843
6844# qhasm: xmm2 &= xmm8
6845# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
6846# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
6847pand %xmm8,%xmm2
6848
6849# qhasm: xmm8 ^= xmm11
6850# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
6851# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
6852pxor %xmm11,%xmm8
6853
6854# qhasm: xmm8 &= xmm1
6855# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
6856# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
6857pand %xmm4,%xmm8
6858
6859# qhasm: xmm11 &= xmm5
6860# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
6861# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
6862pand %xmm7,%xmm11
6863
6864# qhasm: xmm8 ^= xmm11
6865# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
6866# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
6867pxor %xmm11,%xmm8
6868
6869# qhasm: xmm11 ^= xmm2
6870# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
6871# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
6872pxor %xmm2,%xmm11
6873
6874# qhasm: xmm14 ^= xmm4
6875# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
6876# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
6877pxor %xmm0,%xmm14
6878
6879# qhasm: xmm8 ^= xmm4
6880# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
6881# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
6882pxor %xmm0,%xmm8
6883
6884# qhasm: xmm13 ^= xmm0
6885# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
6886# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
6887pxor %xmm1,%xmm13
6888
6889# qhasm: xmm11 ^= xmm0
6890# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
6891# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
6892pxor %xmm1,%xmm11
6893
6894# qhasm: xmm4 = xmm15
6895# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
6896# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
6897movdqa %xmm15,%xmm0
6898
6899# qhasm: xmm0 = xmm9
6900# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
6901# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
6902movdqa %xmm9,%xmm1
6903
6904# qhasm: xmm4 ^= xmm12
6905# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
6906# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
6907pxor %xmm12,%xmm0
6908
6909# qhasm: xmm0 ^= xmm10
6910# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
6911# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
6912pxor %xmm10,%xmm1
6913
6914# qhasm: xmm3 = xmm7
6915# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
6916# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
6917movdqa %xmm5,%xmm2
6918
6919# qhasm: xmm3 ^= xmm6
6920# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
6921# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
6922pxor %xmm3,%xmm2
6923
6924# qhasm: xmm3 &= xmm4
6925# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
6926# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
6927pand %xmm0,%xmm2
6928
6929# qhasm: xmm4 ^= xmm0
6930# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
6931# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
6932pxor %xmm1,%xmm0
6933
6934# qhasm: xmm4 &= xmm6
6935# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
6936# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
6937pand %xmm3,%xmm0
6938
6939# qhasm: xmm0 &= xmm7
6940# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
6941# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
6942pand %xmm5,%xmm1
6943
6944# qhasm: xmm0 ^= xmm4
6945# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
6946# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
6947pxor %xmm0,%xmm1
6948
6949# qhasm: xmm4 ^= xmm3
6950# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
6951# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
6952pxor %xmm2,%xmm0
6953
6954# qhasm: xmm2 = xmm5
6955# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
6956# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
6957movdqa %xmm7,%xmm2
6958
6959# qhasm: xmm2 ^= xmm1
6960# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
6961# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
6962pxor %xmm4,%xmm2
6963
6964# qhasm: xmm2 &= xmm12
6965# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
6966# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
6967pand %xmm12,%xmm2
6968
6969# qhasm: xmm12 ^= xmm10
6970# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
6971# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
6972pxor %xmm10,%xmm12
6973
6974# qhasm: xmm12 &= xmm1
6975# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
6976# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
6977pand %xmm4,%xmm12
6978
6979# qhasm: xmm10 &= xmm5
6980# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
6981# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
6982pand %xmm7,%xmm10
6983
6984# qhasm: xmm12 ^= xmm10
6985# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
6986# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
6987pxor %xmm10,%xmm12
6988
6989# qhasm: xmm10 ^= xmm2
6990# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
6991# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
6992pxor %xmm2,%xmm10
6993
6994# qhasm: xmm7 ^= xmm5
6995# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
6996# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
6997pxor %xmm7,%xmm5
6998
6999# qhasm: xmm6 ^= xmm1
7000# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
7001# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
7002pxor %xmm4,%xmm3
7003
7004# qhasm: xmm3 = xmm7
7005# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
7006# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
7007movdqa %xmm5,%xmm2
7008
7009# qhasm: xmm3 ^= xmm6
7010# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
7011# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
7012pxor %xmm3,%xmm2
7013
7014# qhasm: xmm3 &= xmm15
7015# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
7016# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
7017pand %xmm15,%xmm2
7018
7019# qhasm: xmm15 ^= xmm9
7020# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
7021# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
7022pxor %xmm9,%xmm15
7023
7024# qhasm: xmm15 &= xmm6
7025# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
7026# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
7027pand %xmm3,%xmm15
7028
7029# qhasm: xmm9 &= xmm7
7030# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
7031# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
7032pand %xmm5,%xmm9
7033
7034# qhasm: xmm15 ^= xmm9
7035# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
7036# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
7037pxor %xmm9,%xmm15
7038
7039# qhasm: xmm9 ^= xmm3
7040# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
7041# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
7042pxor %xmm2,%xmm9
7043
7044# qhasm: xmm15 ^= xmm4
7045# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
7046# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
7047pxor %xmm0,%xmm15
7048
7049# qhasm: xmm12 ^= xmm4
7050# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
7051# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
7052pxor %xmm0,%xmm12
7053
7054# qhasm: xmm9 ^= xmm0
7055# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
7056# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
7057pxor %xmm1,%xmm9
7058
7059# qhasm: xmm10 ^= xmm0
7060# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
7061# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
7062pxor %xmm1,%xmm10
7063
7064# qhasm: xmm15 ^= xmm8
7065# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
7066# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
7067pxor %xmm8,%xmm15
7068
7069# qhasm: xmm9 ^= xmm14
7070# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
7071# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
7072pxor %xmm14,%xmm9
7073
7074# qhasm: xmm12 ^= xmm15
7075# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
7076# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
7077pxor %xmm15,%xmm12
7078
7079# qhasm: xmm14 ^= xmm8
7080# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
7081# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
7082pxor %xmm8,%xmm14
7083
7084# qhasm: xmm8 ^= xmm9
7085# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
7086# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
7087pxor %xmm9,%xmm8
7088
7089# qhasm: xmm9 ^= xmm13
7090# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
7091# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
7092pxor %xmm13,%xmm9
7093
7094# qhasm: xmm13 ^= xmm10
7095# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
7096# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
7097pxor %xmm10,%xmm13
7098
7099# qhasm: xmm12 ^= xmm13
7100# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
7101# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
7102pxor %xmm13,%xmm12
7103
7104# qhasm: xmm10 ^= xmm11
7105# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
7106# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
7107pxor %xmm11,%xmm10
7108
7109# qhasm: xmm11 ^= xmm13
7110# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
7111# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
7112pxor %xmm13,%xmm11
7113
7114# qhasm: xmm14 ^= xmm11
7115# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
7116# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
7117pxor %xmm11,%xmm14
7118
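# MixColumns for this round: the same pshufd $0x93 / $0x4E rotate-and-xor
# pattern as above, applied to the fresh S-box outputs.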
7119# qhasm: xmm0 = shuffle dwords of xmm8 by 0x93
7120# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
7121# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
7122pshufd $0x93,%xmm8,%xmm0
7123
7124# qhasm: xmm1 = shuffle dwords of xmm9 by 0x93
7125# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
7126# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
7127pshufd $0x93,%xmm9,%xmm1
7128
7129# qhasm: xmm2 = shuffle dwords of xmm12 by 0x93
7130# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
7131# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
7132pshufd $0x93,%xmm12,%xmm2
7133
7134# qhasm: xmm3 = shuffle dwords of xmm14 by 0x93
7135# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
7136# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
7137pshufd $0x93,%xmm14,%xmm3
7138
7139# qhasm: xmm4 = shuffle dwords of xmm11 by 0x93
7140# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
7141# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
7142pshufd $0x93,%xmm11,%xmm4
7143
7144# qhasm: xmm5 = shuffle dwords of xmm15 by 0x93
7145# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
7146# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
7147pshufd $0x93,%xmm15,%xmm5
7148
7149# qhasm: xmm6 = shuffle dwords of xmm10 by 0x93
7150# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
7151# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
7152pshufd $0x93,%xmm10,%xmm6
7153
7154# qhasm: xmm7 = shuffle dwords of xmm13 by 0x93
7155# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
7156# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
7157pshufd $0x93,%xmm13,%xmm7
7158
7159# qhasm: xmm8 ^= xmm0
7160# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
7161# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
7162pxor %xmm0,%xmm8
7163
7164# qhasm: xmm9 ^= xmm1
7165# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
7166# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
7167pxor %xmm1,%xmm9
7168
7169# qhasm: xmm12 ^= xmm2
7170# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13
7171# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12
7172pxor %xmm2,%xmm12
7173
7174# qhasm: xmm14 ^= xmm3
7175# asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15
7176# asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14
7177pxor %xmm3,%xmm14
7178
7179# qhasm: xmm11 ^= xmm4
7180# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
7181# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
7182pxor %xmm4,%xmm11
7183
7184# qhasm: xmm15 ^= xmm5
7185# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
7186# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
7187pxor %xmm5,%xmm15
7188
7189# qhasm: xmm10 ^= xmm6
7190# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11
7191# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10
7192pxor %xmm6,%xmm10
7193
7194# qhasm: xmm13 ^= xmm7
7195# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
7196# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
7197pxor %xmm7,%xmm13
7198
7199# qhasm: xmm0 ^= xmm13
7200# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
7201# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
7202pxor %xmm13,%xmm0
7203
7204# qhasm: xmm1 ^= xmm8
7205# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
7206# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
7207pxor %xmm8,%xmm1
7208
7209# qhasm: xmm2 ^= xmm9
7210# asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3
7211# asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2
7212pxor %xmm9,%xmm2
7213
7214# qhasm: xmm1 ^= xmm13
7215# asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2
7216# asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1
7217pxor %xmm13,%xmm1
7218
7219# qhasm: xmm3 ^= xmm12
7220# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
7221# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
7222pxor %xmm12,%xmm3
7223
7224# qhasm: xmm4 ^= xmm14
7225# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
7226# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
7227pxor %xmm14,%xmm4
7228
7229# qhasm: xmm5 ^= xmm11
7230# asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6
7231# asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5
7232pxor %xmm11,%xmm5
7233
7234# qhasm: xmm3 ^= xmm13
7235# asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4
7236# asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3
7237pxor %xmm13,%xmm3
7238
7239# qhasm: xmm6 ^= xmm15
7240# asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7
7241# asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6
7242pxor %xmm15,%xmm6
7243
7244# qhasm: xmm7 ^= xmm10
7245# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
7246# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
7247pxor %xmm10,%xmm7
7248
7249# qhasm: xmm4 ^= xmm13
7250# asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5
7251# asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4
7252pxor %xmm13,%xmm4
7253
7254# qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E
7255# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
7256# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
7257pshufd $0x4E,%xmm8,%xmm8
7258
7259# qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E
7260# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
7261# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
7262pshufd $0x4E,%xmm9,%xmm9
7263
7264# qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E
7265# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
7266# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
7267pshufd $0x4E,%xmm12,%xmm12
7268
7269# qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E
7270# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
7271# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
7272pshufd $0x4E,%xmm14,%xmm14
7273
7274# qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E
7275# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
7276# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
7277pshufd $0x4E,%xmm11,%xmm11
7278
7279# qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E
7280# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
7281# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
7282pshufd $0x4E,%xmm15,%xmm15
7283
7284# qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E
7285# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
7286# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
7287pshufd $0x4E,%xmm10,%xmm10
7288
7289# qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E
7290# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
7291# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
7292pshufd $0x4E,%xmm13,%xmm13
7293
7294# qhasm: xmm0 ^= xmm8
7295# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
7296# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
7297pxor %xmm8,%xmm0
7298
7299# qhasm: xmm1 ^= xmm9
7300# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
7301# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
7302pxor %xmm9,%xmm1
7303
7304# qhasm: xmm2 ^= xmm12
7305# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
7306# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
7307pxor %xmm12,%xmm2
7308
7309# qhasm: xmm3 ^= xmm14
7310# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
7311# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
7312pxor %xmm14,%xmm3
7313
7314# qhasm: xmm4 ^= xmm11
7315# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
7316# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
7317pxor %xmm11,%xmm4
7318
7319# qhasm: xmm5 ^= xmm15
7320# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
7321# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
7322pxor %xmm15,%xmm5
7323
7324# qhasm: xmm6 ^= xmm10
7325# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
7326# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
7327pxor %xmm10,%xmm6
7328
7329# qhasm: xmm7 ^= xmm13
7330# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
7331# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
7332pxor %xmm13,%xmm7
7333
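# AddRoundKey with the next bitsliced round key (c+768..c+880), followed
# by ShiftRows via pshufb SR.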
7334# qhasm: xmm0 ^= *(int128 *)(c + 768)
7335# asm 1: pxor 768(<c=int64#5),<xmm0=int6464#1
7336# asm 2: pxor 768(<c=%r8),<xmm0=%xmm0
7337pxor 768(%r8),%xmm0
7338
7339# qhasm: shuffle bytes of xmm0 by SR
7340# asm 1: pshufb SR,<xmm0=int6464#1
7341# asm 2: pshufb SR,<xmm0=%xmm0
7342pshufb SR,%xmm0
7343
7344# qhasm: xmm1 ^= *(int128 *)(c + 784)
7345# asm 1: pxor 784(<c=int64#5),<xmm1=int6464#2
7346# asm 2: pxor 784(<c=%r8),<xmm1=%xmm1
7347pxor 784(%r8),%xmm1
7348
7349# qhasm: shuffle bytes of xmm1 by SR
7350# asm 1: pshufb SR,<xmm1=int6464#2
7351# asm 2: pshufb SR,<xmm1=%xmm1
7352pshufb SR,%xmm1
7353
7354# qhasm: xmm2 ^= *(int128 *)(c + 800)
7355# asm 1: pxor 800(<c=int64#5),<xmm2=int6464#3
7356# asm 2: pxor 800(<c=%r8),<xmm2=%xmm2
7357pxor 800(%r8),%xmm2
7358
7359# qhasm: shuffle bytes of xmm2 by SR
7360# asm 1: pshufb SR,<xmm2=int6464#3
7361# asm 2: pshufb SR,<xmm2=%xmm2
7362pshufb SR,%xmm2
7363
7364# qhasm: xmm3 ^= *(int128 *)(c + 816)
7365# asm 1: pxor 816(<c=int64#5),<xmm3=int6464#4
7366# asm 2: pxor 816(<c=%r8),<xmm3=%xmm3
7367pxor 816(%r8),%xmm3
7368
7369# qhasm: shuffle bytes of xmm3 by SR
7370# asm 1: pshufb SR,<xmm3=int6464#4
7371# asm 2: pshufb SR,<xmm3=%xmm3
7372pshufb SR,%xmm3
7373
7374# qhasm: xmm4 ^= *(int128 *)(c + 832)
7375# asm 1: pxor 832(<c=int64#5),<xmm4=int6464#5
7376# asm 2: pxor 832(<c=%r8),<xmm4=%xmm4
7377pxor 832(%r8),%xmm4
7378
7379# qhasm: shuffle bytes of xmm4 by SR
7380# asm 1: pshufb SR,<xmm4=int6464#5
7381# asm 2: pshufb SR,<xmm4=%xmm4
7382pshufb SR,%xmm4
7383
7384# qhasm: xmm5 ^= *(int128 *)(c + 848)
7385# asm 1: pxor 848(<c=int64#5),<xmm5=int6464#6
7386# asm 2: pxor 848(<c=%r8),<xmm5=%xmm5
7387pxor 848(%r8),%xmm5
7388
7389# qhasm: shuffle bytes of xmm5 by SR
7390# asm 1: pshufb SR,<xmm5=int6464#6
7391# asm 2: pshufb SR,<xmm5=%xmm5
7392pshufb SR,%xmm5
7393
7394# qhasm: xmm6 ^= *(int128 *)(c + 864)
7395# asm 1: pxor 864(<c=int64#5),<xmm6=int6464#7
7396# asm 2: pxor 864(<c=%r8),<xmm6=%xmm6
7397pxor 864(%r8),%xmm6
7398
7399# qhasm: shuffle bytes of xmm6 by SR
7400# asm 1: pshufb SR,<xmm6=int6464#7
7401# asm 2: pshufb SR,<xmm6=%xmm6
7402pshufb SR,%xmm6
7403
7404# qhasm: xmm7 ^= *(int128 *)(c + 880)
7405# asm 1: pxor 880(<c=int64#5),<xmm7=int6464#8
7406# asm 2: pxor 880(<c=%r8),<xmm7=%xmm7
7407pxor 880(%r8),%xmm7
7408
7409# qhasm: shuffle bytes of xmm7 by SR
7410# asm 1: pshufb SR,<xmm7=int6464#8
7411# asm 2: pshufb SR,<xmm7=%xmm7
7412pshufb SR,%xmm7
7413
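# The S-box input linear transform for the following round starts here.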
7414# qhasm: xmm5 ^= xmm6
7415# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
7416# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
7417pxor %xmm6,%xmm5
7418
7419# qhasm: xmm2 ^= xmm1
7420# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
7421# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
7422pxor %xmm1,%xmm2
7423
7424# qhasm: xmm5 ^= xmm0
7425# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
7426# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
7427pxor %xmm0,%xmm5
7428
7429# qhasm: xmm6 ^= xmm2
7430# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
7431# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
7432pxor %xmm2,%xmm6
7433
7434# qhasm: xmm3 ^= xmm0
7435# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
7436# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
7437pxor %xmm0,%xmm3
7438
7439# qhasm: xmm6 ^= xmm3
7440# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
7441# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
7442pxor %xmm3,%xmm6
7443
7444# qhasm: xmm3 ^= xmm7
7445# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
7446# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
7447pxor %xmm7,%xmm3
7448
7449# qhasm: xmm3 ^= xmm4
7450# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
7451# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
7452pxor %xmm4,%xmm3
7453
7454# qhasm: xmm7 ^= xmm5
7455# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
7456# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
7457pxor %xmm5,%xmm7
7458
7459# qhasm: xmm3 ^= xmm1
7460# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
7461# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
7462pxor %xmm1,%xmm3
7463
7464# qhasm: xmm4 ^= xmm5
7465# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
7466# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
7467pxor %xmm5,%xmm4
7468
7469# qhasm: xmm2 ^= xmm7
7470# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
7471# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
7472pxor %xmm7,%xmm2
7473
7474# qhasm: xmm1 ^= xmm5
7475# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
7476# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
7477pxor %xmm5,%xmm1
7478
7479# qhasm: xmm11 = xmm7
7480# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
7481# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
7482movdqa %xmm7,%xmm8
7483
7484# qhasm: xmm10 = xmm1
7485# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
7486# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
7487movdqa %xmm1,%xmm9
7488
7489# qhasm: xmm9 = xmm5
7490# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
7491# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
7492movdqa %xmm5,%xmm10
7493
7494# qhasm: xmm13 = xmm2
7495# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
7496# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
7497movdqa %xmm2,%xmm11
7498
7499# qhasm: xmm12 = xmm6
7500# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
7501# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
7502movdqa %xmm6,%xmm12
7503
7504# qhasm: xmm11 ^= xmm4
7505# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
7506# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
7507pxor %xmm4,%xmm8
7508
7509# qhasm: xmm10 ^= xmm2
7510# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
7511# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
7512pxor %xmm2,%xmm9
7513
7514# qhasm: xmm9 ^= xmm3
7515# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
7516# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
7517pxor %xmm3,%xmm10
7518
7519# qhasm: xmm13 ^= xmm4
7520# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
7521# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
7522pxor %xmm4,%xmm11
7523
7524# qhasm: xmm12 ^= xmm0
7525# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
7526# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
7527pxor %xmm0,%xmm12
7528
7529# qhasm: xmm14 = xmm11
7530# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
7531# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
7532movdqa %xmm8,%xmm13
7533
7534# qhasm: xmm8 = xmm10
7535# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
7536# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
7537movdqa %xmm9,%xmm14
7538
7539# qhasm: xmm15 = xmm11
7540# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
7541# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
7542movdqa %xmm8,%xmm15
7543
7544# qhasm: xmm10 |= xmm9
7545# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
7546# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
7547por %xmm10,%xmm9
7548
7549# qhasm: xmm11 |= xmm12
7550# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
7551# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
7552por %xmm12,%xmm8
7553
7554# qhasm: xmm15 ^= xmm8
7555# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
7556# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
7557pxor %xmm14,%xmm15
7558
7559# qhasm: xmm14 &= xmm12
7560# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
7561# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
7562pand %xmm12,%xmm13
7563
7564# qhasm: xmm8 &= xmm9
7565# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
7566# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
7567pand %xmm10,%xmm14
7568
7569# qhasm: xmm12 ^= xmm9
7570# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
7571# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
7572pxor %xmm10,%xmm12
7573
7574# qhasm: xmm15 &= xmm12
7575# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
7576# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
7577pand %xmm12,%xmm15
7578
7579# qhasm: xmm12 = xmm3
7580# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
7581# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
7582movdqa %xmm3,%xmm10
7583
7584# qhasm: xmm12 ^= xmm0
7585# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
7586# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
7587pxor %xmm0,%xmm10
7588
7589# qhasm: xmm13 &= xmm12
7590# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
7591# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
7592pand %xmm10,%xmm11
7593
7594# qhasm: xmm11 ^= xmm13
7595# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
7596# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
7597pxor %xmm11,%xmm8
7598
7599# qhasm: xmm10 ^= xmm13
7600# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
7601# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
7602pxor %xmm11,%xmm9
7603
7604# qhasm: xmm13 = xmm7
7605# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
7606# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
7607movdqa %xmm7,%xmm10
7608
7609# qhasm: xmm13 ^= xmm1
7610# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
7611# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
7612pxor %xmm1,%xmm10
7613
7614# qhasm: xmm12 = xmm5
7615# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
7616# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
7617movdqa %xmm5,%xmm11
7618
7619# qhasm: xmm9 = xmm13
7620# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
7621# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
7622movdqa %xmm10,%xmm12
7623
7624# qhasm: xmm12 ^= xmm6
7625# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
7626# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
7627pxor %xmm6,%xmm11
7628
7629# qhasm: xmm9 |= xmm12
7630# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
7631# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
7632por %xmm11,%xmm12
7633
7634# qhasm: xmm13 &= xmm12
7635# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
7636# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
7637pand %xmm11,%xmm10
7638
7639# qhasm: xmm8 ^= xmm13
7640# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
7641# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
7642pxor %xmm10,%xmm14
7643
7644# qhasm: xmm11 ^= xmm15
7645# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
7646# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
7647pxor %xmm15,%xmm8
7648
7649# qhasm: xmm10 ^= xmm14
7650# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
7651# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
7652pxor %xmm13,%xmm9
7653
7654# qhasm: xmm9 ^= xmm15
7655# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
7656# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
7657pxor %xmm15,%xmm12
7658
7659# qhasm: xmm8 ^= xmm14
7660# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
7661# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
7662pxor %xmm13,%xmm14
7663
7664# qhasm: xmm9 ^= xmm14
7665# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
7666# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
7667pxor %xmm13,%xmm12
7668
7669# qhasm: xmm12 = xmm2
7670# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
7671# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
7672movdqa %xmm2,%xmm10
7673
7674# qhasm: xmm13 = xmm4
7675# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
7676# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
7677movdqa %xmm4,%xmm11
7678
7679# qhasm: xmm14 = xmm1
7680# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
7681# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
7682movdqa %xmm1,%xmm13
7683
7684# qhasm: xmm15 = xmm7
7685# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
7686# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
7687movdqa %xmm7,%xmm15
7688
7689# qhasm: xmm12 &= xmm3
7690# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
7691# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
7692pand %xmm3,%xmm10
7693
7694# qhasm: xmm13 &= xmm0
7695# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
7696# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
7697pand %xmm0,%xmm11
7698
7699# qhasm: xmm14 &= xmm5
7700# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
7701# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
7702pand %xmm5,%xmm13
7703
7704# qhasm: xmm15 |= xmm6
7705# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
7706# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
7707por %xmm6,%xmm15
7708
7709# qhasm: xmm11 ^= xmm12
7710# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
7711# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
7712pxor %xmm10,%xmm8
7713
7714# qhasm: xmm10 ^= xmm13
7715# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
7716# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
7717pxor %xmm11,%xmm9
7718
7719# qhasm: xmm9 ^= xmm14
7720# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
7721# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
7722pxor %xmm13,%xmm12
7723
7724# qhasm: xmm8 ^= xmm15
7725# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
7726# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
7727pxor %xmm15,%xmm14
7728
7729# qhasm: xmm12 = xmm11
7730# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
7731# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
7732movdqa %xmm8,%xmm10
7733
7734# qhasm: xmm12 ^= xmm10
7735# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
7736# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
7737pxor %xmm9,%xmm10
7738
7739# qhasm: xmm11 &= xmm9
7740# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
7741# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
7742pand %xmm12,%xmm8
7743
7744# qhasm: xmm14 = xmm8
7745# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
7746# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
7747movdqa %xmm14,%xmm11
7748
7749# qhasm: xmm14 ^= xmm11
7750# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
7751# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
7752pxor %xmm8,%xmm11
7753
7754# qhasm: xmm15 = xmm12
7755# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
7756# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
7757movdqa %xmm10,%xmm13
7758
7759# qhasm: xmm15 &= xmm14
7760# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
7761# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
7762pand %xmm11,%xmm13
7763
7764# qhasm: xmm15 ^= xmm10
7765# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
7766# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
7767pxor %xmm9,%xmm13
7768
7769# qhasm: xmm13 = xmm9
7770# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
7771# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
7772movdqa %xmm12,%xmm15
7773
7774# qhasm: xmm13 ^= xmm8
7775# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
7776# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
7777pxor %xmm14,%xmm15
7778
7779# qhasm: xmm11 ^= xmm10
7780# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
7781# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
7782pxor %xmm9,%xmm8
7783
7784# qhasm: xmm13 &= xmm11
7785# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
7786# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
7787pand %xmm8,%xmm15
7788
7789# qhasm: xmm13 ^= xmm8
7790# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
7791# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
7792pxor %xmm14,%xmm15
7793
7794# qhasm: xmm9 ^= xmm13
7795# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
7796# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
7797pxor %xmm15,%xmm12
7798
7799# qhasm: xmm10 = xmm14
7800# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
7801# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
7802movdqa %xmm11,%xmm8
7803
7804# qhasm: xmm10 ^= xmm13
7805# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
7806# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
7807pxor %xmm15,%xmm8
7808
7809# qhasm: xmm10 &= xmm8
7810# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
7811# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
7812pand %xmm14,%xmm8
7813
7814# qhasm: xmm9 ^= xmm10
7815# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
7816# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
7817pxor %xmm8,%xmm12
7818
7819# qhasm: xmm14 ^= xmm10
7820# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
7821# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
7822pxor %xmm8,%xmm11
7823
7824# qhasm: xmm14 &= xmm15
7825# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
7826# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
7827pand %xmm13,%xmm11
7828
7829# qhasm: xmm14 ^= xmm12
7830# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
7831# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
7832pxor %xmm10,%xmm11
7833
7834# qhasm: xmm12 = xmm6
7835# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
7836# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
7837movdqa %xmm6,%xmm8
7838
7839# qhasm: xmm8 = xmm5
7840# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
7841# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
7842movdqa %xmm5,%xmm9
7843
7844# qhasm: xmm10 = xmm15
7845# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
7846# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
7847movdqa %xmm13,%xmm10
7848
7849# qhasm: xmm10 ^= xmm14
7850# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
7851# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
7852pxor %xmm11,%xmm10
7853
7854# qhasm: xmm10 &= xmm6
7855# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
7856# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
7857pand %xmm6,%xmm10
7858
7859# qhasm: xmm6 ^= xmm5
7860# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
7861# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
7862pxor %xmm5,%xmm6
7863
7864# qhasm: xmm6 &= xmm14
7865# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
7866# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
7867pand %xmm11,%xmm6
7868
7869# qhasm: xmm5 &= xmm15
7870# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
7871# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
7872pand %xmm13,%xmm5
7873
7874# qhasm: xmm6 ^= xmm5
7875# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
7876# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
7877pxor %xmm5,%xmm6
7878
7879# qhasm: xmm5 ^= xmm10
7880# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
7881# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
7882pxor %xmm10,%xmm5
7883
7884# qhasm: xmm12 ^= xmm0
7885# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
7886# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
7887pxor %xmm0,%xmm8
7888
7889# qhasm: xmm8 ^= xmm3
7890# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
7891# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
7892pxor %xmm3,%xmm9
7893
7894# qhasm: xmm15 ^= xmm13
7895# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
7896# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
7897pxor %xmm15,%xmm13
7898
7899# qhasm: xmm14 ^= xmm9
7900# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
7901# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
7902pxor %xmm12,%xmm11
7903
7904# qhasm: xmm11 = xmm15
7905# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
7906# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
7907movdqa %xmm13,%xmm10
7908
7909# qhasm: xmm11 ^= xmm14
7910# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
7911# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
7912pxor %xmm11,%xmm10
7913
7914# qhasm: xmm11 &= xmm12
7915# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
7916# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
7917pand %xmm8,%xmm10
7918
7919# qhasm: xmm12 ^= xmm8
7920# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
7921# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
7922pxor %xmm9,%xmm8
7923
7924# qhasm: xmm12 &= xmm14
7925# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
7926# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
7927pand %xmm11,%xmm8
7928
7929# qhasm: xmm8 &= xmm15
7930# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
7931# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
7932pand %xmm13,%xmm9
7933
7934# qhasm: xmm8 ^= xmm12
7935# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
7936# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
7937pxor %xmm8,%xmm9
7938
7939# qhasm: xmm12 ^= xmm11
7940# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
7941# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
7942pxor %xmm10,%xmm8
7943
7944# qhasm: xmm10 = xmm13
7945# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
7946# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
7947movdqa %xmm15,%xmm10
7948
7949# qhasm: xmm10 ^= xmm9
7950# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
7951# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
7952pxor %xmm12,%xmm10
7953
7954# qhasm: xmm10 &= xmm0
7955# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
7956# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
7957pand %xmm0,%xmm10
7958
7959# qhasm: xmm0 ^= xmm3
7960# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
7961# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
7962pxor %xmm3,%xmm0
7963
7964# qhasm: xmm0 &= xmm9
7965# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
7966# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
7967pand %xmm12,%xmm0
7968
7969# qhasm: xmm3 &= xmm13
7970# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
7971# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
7972pand %xmm15,%xmm3
7973
7974# qhasm: xmm0 ^= xmm3
7975# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
7976# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
7977pxor %xmm3,%xmm0
7978
7979# qhasm: xmm3 ^= xmm10
7980# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
7981# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
7982pxor %xmm10,%xmm3
7983
7984# qhasm: xmm6 ^= xmm12
7985# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
7986# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
7987pxor %xmm8,%xmm6
7988
7989# qhasm: xmm0 ^= xmm12
7990# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
7991# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
7992pxor %xmm8,%xmm0
7993
7994# qhasm: xmm5 ^= xmm8
7995# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
7996# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
7997pxor %xmm9,%xmm5
7998
7999# qhasm: xmm3 ^= xmm8
8000# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
8001# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
8002pxor %xmm9,%xmm3
8003
8004# qhasm: xmm12 = xmm7
8005# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
8006# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
8007movdqa %xmm7,%xmm8
8008
8009# qhasm: xmm8 = xmm1
8010# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
8011# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
8012movdqa %xmm1,%xmm9
8013
8014# qhasm: xmm12 ^= xmm4
8015# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
8016# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
8017pxor %xmm4,%xmm8
8018
8019# qhasm: xmm8 ^= xmm2
8020# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
8021# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
8022pxor %xmm2,%xmm9
8023
8024# qhasm: xmm11 = xmm15
8025# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
8026# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
8027movdqa %xmm13,%xmm10
8028
8029# qhasm: xmm11 ^= xmm14
8030# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
8031# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
8032pxor %xmm11,%xmm10
8033
8034# qhasm: xmm11 &= xmm12
8035# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
8036# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
8037pand %xmm8,%xmm10
8038
8039# qhasm: xmm12 ^= xmm8
8040# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
8041# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
8042pxor %xmm9,%xmm8
8043
8044# qhasm: xmm12 &= xmm14
8045# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
8046# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
8047pand %xmm11,%xmm8
8048
8049# qhasm: xmm8 &= xmm15
8050# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
8051# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
8052pand %xmm13,%xmm9
8053
8054# qhasm: xmm8 ^= xmm12
8055# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
8056# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
8057pxor %xmm8,%xmm9
8058
8059# qhasm: xmm12 ^= xmm11
8060# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
8061# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
8062pxor %xmm10,%xmm8
8063
8064# qhasm: xmm10 = xmm13
8065# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
8066# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
8067movdqa %xmm15,%xmm10
8068
8069# qhasm: xmm10 ^= xmm9
8070# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
8071# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
8072pxor %xmm12,%xmm10
8073
8074# qhasm: xmm10 &= xmm4
8075# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
8076# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
8077pand %xmm4,%xmm10
8078
8079# qhasm: xmm4 ^= xmm2
8080# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
8081# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
8082pxor %xmm2,%xmm4
8083
8084# qhasm: xmm4 &= xmm9
8085# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
8086# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
8087pand %xmm12,%xmm4
8088
8089# qhasm: xmm2 &= xmm13
8090# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
8091# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
8092pand %xmm15,%xmm2
8093
8094# qhasm: xmm4 ^= xmm2
8095# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
8096# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
8097pxor %xmm2,%xmm4
8098
8099# qhasm: xmm2 ^= xmm10
8100# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
8101# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
8102pxor %xmm10,%xmm2
8103
8104# qhasm: xmm15 ^= xmm13
8105# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
8106# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
8107pxor %xmm15,%xmm13
8108
8109# qhasm: xmm14 ^= xmm9
8110# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
8111# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
8112pxor %xmm12,%xmm11
8113
8114# qhasm: xmm11 = xmm15
8115# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
8116# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
8117movdqa %xmm13,%xmm10
8118
8119# qhasm: xmm11 ^= xmm14
8120# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
8121# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
8122pxor %xmm11,%xmm10
8123
8124# qhasm: xmm11 &= xmm7
8125# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
8126# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
8127pand %xmm7,%xmm10
8128
8129# qhasm: xmm7 ^= xmm1
8130# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
8131# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
8132pxor %xmm1,%xmm7
8133
8134# qhasm: xmm7 &= xmm14
8135# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
8136# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
8137pand %xmm11,%xmm7
8138
8139# qhasm: xmm1 &= xmm15
8140# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
8141# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
8142pand %xmm13,%xmm1
8143
8144# qhasm: xmm7 ^= xmm1
8145# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
8146# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
8147pxor %xmm1,%xmm7
8148
8149# qhasm: xmm1 ^= xmm11
8150# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
8151# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
8152pxor %xmm10,%xmm1
8153
8154# qhasm: xmm7 ^= xmm12
8155# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
8156# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
8157pxor %xmm8,%xmm7
8158
8159# qhasm: xmm4 ^= xmm12
8160# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
8161# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
8162pxor %xmm8,%xmm4
8163
8164# qhasm: xmm1 ^= xmm8
8165# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
8166# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
8167pxor %xmm9,%xmm1
8168
8169# qhasm: xmm2 ^= xmm8
8170# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
8171# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
8172pxor %xmm9,%xmm2
8173
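# The XOR-only tail below appears to be the output linear layer of
# the S-box, recombining the inverter outputs into the eight slices.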
8174# qhasm: xmm7 ^= xmm0
8175# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
8176# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
8177pxor %xmm0,%xmm7
8178
8179# qhasm: xmm1 ^= xmm6
8180# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
8181# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
8182pxor %xmm6,%xmm1
8183
8184# qhasm: xmm4 ^= xmm7
8185# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
8186# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
8187pxor %xmm7,%xmm4
8188
8189# qhasm: xmm6 ^= xmm0
8190# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
8191# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
8192pxor %xmm0,%xmm6
8193
8194# qhasm: xmm0 ^= xmm1
8195# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
8196# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
8197pxor %xmm1,%xmm0
8198
8199# qhasm: xmm1 ^= xmm5
8200# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
8201# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
8202pxor %xmm5,%xmm1
8203
8204# qhasm: xmm5 ^= xmm2
8205# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
8206# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
8207pxor %xmm2,%xmm5
8208
8209# qhasm: xmm4 ^= xmm5
8210# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
8211# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
8212pxor %xmm5,%xmm4
8213
8214# qhasm: xmm2 ^= xmm3
8215# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
8216# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
8217pxor %xmm3,%xmm2
8218
8219# qhasm: xmm3 ^= xmm5
8220# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
8221# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
8222pxor %xmm5,%xmm3
8223
8224# qhasm: xmm6 ^= xmm3
8225# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
8226# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
8227pxor %xmm3,%xmm6
8228
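# MixColumns.  pshufd with immediate $0x93 rotates the four 32-bit
# words of a slice by one position; in this packed layout that plays
# the role of the byte rotation MixColumns needs.  XORing each slice
# with rotated copies of itself and of neighbouring slices (the
# cross-slice terms supplying the multiply-by-x feedback) evaluates
# the MixColumns matrix slice by slice.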
8229# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
8230# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
8231# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
8232pshufd $0x93,%xmm0,%xmm8
8233
8234# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
8235# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
8236# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
8237pshufd $0x93,%xmm1,%xmm9
8238
8239# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
8240# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
8241# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
8242pshufd $0x93,%xmm4,%xmm10
8243
8244# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
8245# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
8246# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
8247pshufd $0x93,%xmm6,%xmm11
8248
8249# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
8250# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
8251# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
8252pshufd $0x93,%xmm3,%xmm12
8253
8254# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
8255# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
8256# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
8257pshufd $0x93,%xmm7,%xmm13
8258
8259# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
8260# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
8261# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
8262pshufd $0x93,%xmm2,%xmm14
8263
8264# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
8265# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
8266# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
8267pshufd $0x93,%xmm5,%xmm15
8268
8269# qhasm: xmm0 ^= xmm8
8270# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
8271# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
8272pxor %xmm8,%xmm0
8273
8274# qhasm: xmm1 ^= xmm9
8275# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
8276# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
8277pxor %xmm9,%xmm1
8278
8279# qhasm: xmm4 ^= xmm10
8280# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
8281# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
8282pxor %xmm10,%xmm4
8283
8284# qhasm: xmm6 ^= xmm11
8285# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
8286# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
8287pxor %xmm11,%xmm6
8288
8289# qhasm: xmm3 ^= xmm12
8290# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
8291# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
8292pxor %xmm12,%xmm3
8293
8294# qhasm: xmm7 ^= xmm13
8295# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
8296# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
8297pxor %xmm13,%xmm7
8298
8299# qhasm: xmm2 ^= xmm14
8300# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
8301# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
8302pxor %xmm14,%xmm2
8303
8304# qhasm: xmm5 ^= xmm15
8305# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
8306# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
8307pxor %xmm15,%xmm5
8308
8309# qhasm: xmm8 ^= xmm5
8310# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
8311# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
8312pxor %xmm5,%xmm8
8313
8314# qhasm: xmm9 ^= xmm0
8315# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
8316# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
8317pxor %xmm0,%xmm9
8318
8319# qhasm: xmm10 ^= xmm1
8320# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
8321# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
8322pxor %xmm1,%xmm10
8323
8324# qhasm: xmm9 ^= xmm5
8325# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
8326# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
8327pxor %xmm5,%xmm9
8328
8329# qhasm: xmm11 ^= xmm4
8330# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
8331# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
8332pxor %xmm4,%xmm11
8333
8334# qhasm: xmm12 ^= xmm6
8335# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
8336# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
8337pxor %xmm6,%xmm12
8338
8339# qhasm: xmm13 ^= xmm3
8340# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
8341# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
8342pxor %xmm3,%xmm13
8343
8344# qhasm: xmm11 ^= xmm5
8345# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
8346# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
8347pxor %xmm5,%xmm11
8348
8349# qhasm: xmm14 ^= xmm7
8350# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
8351# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
8352pxor %xmm7,%xmm14
8353
8354# qhasm: xmm15 ^= xmm2
8355# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
8356# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
8357pxor %xmm2,%xmm15
8358
8359# qhasm: xmm12 ^= xmm5
8360# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
8361# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
8362pxor %xmm5,%xmm12
8363
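# Second half of MixColumns: pshufd with $0x4E swaps the 64-bit
# halves of each slice, i.e. a rotation by two words, producing the
# remaining rotated terms that are folded in below.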
8364# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
8365# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
8366# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
8367pshufd $0x4E,%xmm0,%xmm0
8368
8369# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
8370# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
8371# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
8372pshufd $0x4E,%xmm1,%xmm1
8373
8374# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
8375# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
8376# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
8377pshufd $0x4E,%xmm4,%xmm4
8378
8379# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
8380# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
8381# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
8382pshufd $0x4E,%xmm6,%xmm6
8383
8384# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
8385# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
8386# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
8387pshufd $0x4E,%xmm3,%xmm3
8388
8389# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
8390# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
8391# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
8392pshufd $0x4E,%xmm7,%xmm7
8393
8394# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
8395# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
8396# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
8397pshufd $0x4E,%xmm2,%xmm2
8398
8399# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
8400# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
8401# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
8402pshufd $0x4E,%xmm5,%xmm5
8403
8404# qhasm: xmm8 ^= xmm0
8405# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
8406# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
8407pxor %xmm0,%xmm8
8408
8409# qhasm: xmm9 ^= xmm1
8410# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
8411# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
8412pxor %xmm1,%xmm9
8413
8414# qhasm: xmm10 ^= xmm4
8415# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
8416# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
8417pxor %xmm4,%xmm10
8418
8419# qhasm: xmm11 ^= xmm6
8420# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
8421# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
8422pxor %xmm6,%xmm11
8423
8424# qhasm: xmm12 ^= xmm3
8425# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
8426# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
8427pxor %xmm3,%xmm12
8428
8429# qhasm: xmm13 ^= xmm7
8430# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
8431# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
8432pxor %xmm7,%xmm13
8433
8434# qhasm: xmm14 ^= xmm2
8435# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
8436# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
8437pxor %xmm2,%xmm14
8438
8439# qhasm: xmm15 ^= xmm5
8440# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
8441# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
8442pxor %xmm5,%xmm15
8443
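# Next round: AddRoundKey with the eight bitsliced key slices at
# c+896 .. c+1008 (presumably round 7), each pxor immediately
# followed by the SR ShiftRows shuffle, exactly as in the previous
# round.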
8444# qhasm: xmm8 ^= *(int128 *)(c + 896)
8445# asm 1: pxor 896(<c=int64#5),<xmm8=int6464#9
8446# asm 2: pxor 896(<c=%r8),<xmm8=%xmm8
8447pxor 896(%r8),%xmm8
8448
8449# qhasm: shuffle bytes of xmm8 by SR
8450# asm 1: pshufb SR,<xmm8=int6464#9
8451# asm 2: pshufb SR,<xmm8=%xmm8
8452pshufb SR,%xmm8
8453
8454# qhasm: xmm9 ^= *(int128 *)(c + 912)
8455# asm 1: pxor 912(<c=int64#5),<xmm9=int6464#10
8456# asm 2: pxor 912(<c=%r8),<xmm9=%xmm9
8457pxor 912(%r8),%xmm9
8458
8459# qhasm: shuffle bytes of xmm9 by SR
8460# asm 1: pshufb SR,<xmm9=int6464#10
8461# asm 2: pshufb SR,<xmm9=%xmm9
8462pshufb SR,%xmm9
8463
8464# qhasm: xmm10 ^= *(int128 *)(c + 928)
8465# asm 1: pxor 928(<c=int64#5),<xmm10=int6464#11
8466# asm 2: pxor 928(<c=%r8),<xmm10=%xmm10
8467pxor 928(%r8),%xmm10
8468
8469# qhasm: shuffle bytes of xmm10 by SR
8470# asm 1: pshufb SR,<xmm10=int6464#11
8471# asm 2: pshufb SR,<xmm10=%xmm10
8472pshufb SR,%xmm10
8473
8474# qhasm: xmm11 ^= *(int128 *)(c + 944)
8475# asm 1: pxor 944(<c=int64#5),<xmm11=int6464#12
8476# asm 2: pxor 944(<c=%r8),<xmm11=%xmm11
8477pxor 944(%r8),%xmm11
8478
8479# qhasm: shuffle bytes of xmm11 by SR
8480# asm 1: pshufb SR,<xmm11=int6464#12
8481# asm 2: pshufb SR,<xmm11=%xmm11
8482pshufb SR,%xmm11
8483
8484# qhasm: xmm12 ^= *(int128 *)(c + 960)
8485# asm 1: pxor 960(<c=int64#5),<xmm12=int6464#13
8486# asm 2: pxor 960(<c=%r8),<xmm12=%xmm12
8487pxor 960(%r8),%xmm12
8488
8489# qhasm: shuffle bytes of xmm12 by SR
8490# asm 1: pshufb SR,<xmm12=int6464#13
8491# asm 2: pshufb SR,<xmm12=%xmm12
8492pshufb SR,%xmm12
8493
8494# qhasm: xmm13 ^= *(int128 *)(c + 976)
8495# asm 1: pxor 976(<c=int64#5),<xmm13=int6464#14
8496# asm 2: pxor 976(<c=%r8),<xmm13=%xmm13
8497pxor 976(%r8),%xmm13
8498
8499# qhasm: shuffle bytes of xmm13 by SR
8500# asm 1: pshufb SR,<xmm13=int6464#14
8501# asm 2: pshufb SR,<xmm13=%xmm13
8502pshufb SR,%xmm13
8503
8504# qhasm: xmm14 ^= *(int128 *)(c + 992)
8505# asm 1: pxor 992(<c=int64#5),<xmm14=int6464#15
8506# asm 2: pxor 992(<c=%r8),<xmm14=%xmm14
8507pxor 992(%r8),%xmm14
8508
8509# qhasm: shuffle bytes of xmm14 by SR
8510# asm 1: pshufb SR,<xmm14=int6464#15
8511# asm 2: pshufb SR,<xmm14=%xmm14
8512pshufb SR,%xmm14
8513
8514# qhasm: xmm15 ^= *(int128 *)(c + 1008)
8515# asm 1: pxor 1008(<c=int64#5),<xmm15=int6464#16
8516# asm 2: pxor 1008(<c=%r8),<xmm15=%xmm15
8517pxor 1008(%r8),%xmm15
8518
8519# qhasm: shuffle bytes of xmm15 by SR
8520# asm 1: pshufb SR,<xmm15=int6464#16
8521# asm 2: pshufb SR,<xmm15=%xmm15
8522pshufb SR,%xmm15
8523
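# Input linear layer of the bitsliced S-box again, this time
# operating on the round state held in xmm8..xmm15.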
8524# qhasm: xmm13 ^= xmm14
8525# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
8526# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
8527pxor %xmm14,%xmm13
8528
8529# qhasm: xmm10 ^= xmm9
8530# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
8531# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
8532pxor %xmm9,%xmm10
8533
8534# qhasm: xmm13 ^= xmm8
8535# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
8536# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
8537pxor %xmm8,%xmm13
8538
8539# qhasm: xmm14 ^= xmm10
8540# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
8541# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
8542pxor %xmm10,%xmm14
8543
8544# qhasm: xmm11 ^= xmm8
8545# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
8546# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
8547pxor %xmm8,%xmm11
8548
8549# qhasm: xmm14 ^= xmm11
8550# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
8551# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
8552pxor %xmm11,%xmm14
8553
8554# qhasm: xmm11 ^= xmm15
8555# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
8556# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
8557pxor %xmm15,%xmm11
8558
8559# qhasm: xmm11 ^= xmm12
8560# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
8561# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
8562pxor %xmm12,%xmm11
8563
8564# qhasm: xmm15 ^= xmm13
8565# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
8566# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
8567pxor %xmm13,%xmm15
8568
8569# qhasm: xmm11 ^= xmm9
8570# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
8571# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
8572pxor %xmm9,%xmm11
8573
8574# qhasm: xmm12 ^= xmm13
8575# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
8576# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
8577pxor %xmm13,%xmm12
8578
8579# qhasm: xmm10 ^= xmm15
8580# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
8581# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
8582pxor %xmm15,%xmm10
8583
8584# qhasm: xmm9 ^= xmm13
8585# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
8586# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
8587pxor %xmm13,%xmm9
8588
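# Nonlinear core of the S-box for this round; xmm0..xmm7 now serve as
# the temporaries while xmm8..xmm15 carry the state slices.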
8589# qhasm: xmm3 = xmm15
8590# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
8591# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
8592movdqa %xmm15,%xmm0
8593
8594# qhasm: xmm2 = xmm9
8595# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
8596# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
8597movdqa %xmm9,%xmm1
8598
8599# qhasm: xmm1 = xmm13
8600# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
8601# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
8602movdqa %xmm13,%xmm2
8603
8604# qhasm: xmm5 = xmm10
8605# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
8606# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
8607movdqa %xmm10,%xmm3
8608
8609# qhasm: xmm4 = xmm14
8610# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
8611# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
8612movdqa %xmm14,%xmm4
8613
8614# qhasm: xmm3 ^= xmm12
8615# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
8616# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
8617pxor %xmm12,%xmm0
8618
8619# qhasm: xmm2 ^= xmm10
8620# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
8621# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
8622pxor %xmm10,%xmm1
8623
8624# qhasm: xmm1 ^= xmm11
8625# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
8626# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
8627pxor %xmm11,%xmm2
8628
8629# qhasm: xmm5 ^= xmm12
8630# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
8631# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
8632pxor %xmm12,%xmm3
8633
8634# qhasm: xmm4 ^= xmm8
8635# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
8636# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
8637pxor %xmm8,%xmm4
8638
8639# qhasm: xmm6 = xmm3
8640# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
8641# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
8642movdqa %xmm0,%xmm5
8643
8644# qhasm: xmm0 = xmm2
8645# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
8646# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
8647movdqa %xmm1,%xmm6
8648
8649# qhasm: xmm7 = xmm3
8650# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
8651# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
8652movdqa %xmm0,%xmm7
8653
8654# qhasm: xmm2 |= xmm1
8655# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
8656# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
8657por %xmm2,%xmm1
8658
8659# qhasm: xmm3 |= xmm4
8660# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
8661# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
8662por %xmm4,%xmm0
8663
8664# qhasm: xmm7 ^= xmm0
8665# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
8666# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
8667pxor %xmm6,%xmm7
8668
8669# qhasm: xmm6 &= xmm4
8670# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
8671# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
8672pand %xmm4,%xmm5
8673
8674# qhasm: xmm0 &= xmm1
8675# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
8676# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
8677pand %xmm2,%xmm6
8678
8679# qhasm: xmm4 ^= xmm1
8680# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
8681# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
8682pxor %xmm2,%xmm4
8683
8684# qhasm: xmm7 &= xmm4
8685# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
8686# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
8687pand %xmm4,%xmm7
8688
8689# qhasm: xmm4 = xmm11
8690# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
8691# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
8692movdqa %xmm11,%xmm2
8693
8694# qhasm: xmm4 ^= xmm8
8695# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
8696# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
8697pxor %xmm8,%xmm2
8698
8699# qhasm: xmm5 &= xmm4
8700# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
8701# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
8702pand %xmm2,%xmm3
8703
8704# qhasm: xmm3 ^= xmm5
8705# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
8706# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
8707pxor %xmm3,%xmm0
8708
8709# qhasm: xmm2 ^= xmm5
8710# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
8711# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
8712pxor %xmm3,%xmm1
8713
8714# qhasm: xmm5 = xmm15
8715# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
8716# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
8717movdqa %xmm15,%xmm2
8718
8719# qhasm: xmm5 ^= xmm9
8720# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
8721# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
8722pxor %xmm9,%xmm2
8723
8724# qhasm: xmm4 = xmm13
8725# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
8726# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
8727movdqa %xmm13,%xmm3
8728
8729# qhasm: xmm1 = xmm5
8730# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
8731# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
8732movdqa %xmm2,%xmm4
8733
8734# qhasm: xmm4 ^= xmm14
8735# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
8736# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
8737pxor %xmm14,%xmm3
8738
8739# qhasm: xmm1 |= xmm4
8740# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
8741# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
8742por %xmm3,%xmm4
8743
8744# qhasm: xmm5 &= xmm4
8745# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
8746# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
8747pand %xmm3,%xmm2
8748
8749# qhasm: xmm0 ^= xmm5
8750# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
8751# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
8752pxor %xmm2,%xmm6
8753
8754# qhasm: xmm3 ^= xmm7
8755# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
8756# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
8757pxor %xmm7,%xmm0
8758
8759# qhasm: xmm2 ^= xmm6
8760# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
8761# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
8762pxor %xmm5,%xmm1
8763
8764# qhasm: xmm1 ^= xmm7
8765# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
8766# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
8767pxor %xmm7,%xmm4
8768
8769# qhasm: xmm0 ^= xmm6
8770# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
8771# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
8772pxor %xmm5,%xmm6
8773
8774# qhasm: xmm1 ^= xmm6
8775# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
8776# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
8777pxor %xmm5,%xmm4
8778
8779# qhasm: xmm4 = xmm10
8780# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
8781# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
8782movdqa %xmm10,%xmm2
8783
8784# qhasm: xmm5 = xmm12
8785# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
8786# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
8787movdqa %xmm12,%xmm3
8788
8789# qhasm: xmm6 = xmm9
8790# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
8791# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
8792movdqa %xmm9,%xmm5
8793
8794# qhasm: xmm7 = xmm15
8795# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
8796# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
8797movdqa %xmm15,%xmm7
8798
8799# qhasm: xmm4 &= xmm11
8800# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
8801# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
8802pand %xmm11,%xmm2
8803
8804# qhasm: xmm5 &= xmm8
8805# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
8806# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
8807pand %xmm8,%xmm3
8808
8809# qhasm: xmm6 &= xmm13
8810# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
8811# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
8812pand %xmm13,%xmm5
8813
8814# qhasm: xmm7 |= xmm14
8815# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
8816# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
8817por %xmm14,%xmm7
8818
8819# qhasm: xmm3 ^= xmm4
8820# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
8821# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
8822pxor %xmm2,%xmm0
8823
8824# qhasm: xmm2 ^= xmm5
8825# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
8826# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
8827pxor %xmm3,%xmm1
8828
8829# qhasm: xmm1 ^= xmm6
8830# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
8831# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
8832pxor %xmm5,%xmm4
8833
8834# qhasm: xmm0 ^= xmm7
8835# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
8836# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
8837pxor %xmm7,%xmm6
8838
8839# qhasm: xmm4 = xmm3
8840# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
8841# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
8842movdqa %xmm0,%xmm2
8843
8844# qhasm: xmm4 ^= xmm2
8845# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
8846# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
8847pxor %xmm1,%xmm2
8848
8849# qhasm: xmm3 &= xmm1
8850# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
8851# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
8852pand %xmm4,%xmm0
8853
8854# qhasm: xmm6 = xmm0
8855# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
8856# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
8857movdqa %xmm6,%xmm3
8858
8859# qhasm: xmm6 ^= xmm3
8860# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
8861# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
8862pxor %xmm0,%xmm3
8863
8864# qhasm: xmm7 = xmm4
8865# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
8866# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
8867movdqa %xmm2,%xmm5
8868
8869# qhasm: xmm7 &= xmm6
8870# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
8871# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
8872pand %xmm3,%xmm5
8873
8874# qhasm: xmm7 ^= xmm2
8875# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
8876# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
8877pxor %xmm1,%xmm5
8878
8879# qhasm: xmm5 = xmm1
8880# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
8881# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
8882movdqa %xmm4,%xmm7
8883
8884# qhasm: xmm5 ^= xmm0
8885# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
8886# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
8887pxor %xmm6,%xmm7
8888
8889# qhasm: xmm3 ^= xmm2
8890# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
8891# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
8892pxor %xmm1,%xmm0
8893
8894# qhasm: xmm5 &= xmm3
8895# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
8896# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
8897pand %xmm0,%xmm7
8898
8899# qhasm: xmm5 ^= xmm0
8900# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
8901# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
8902pxor %xmm6,%xmm7
8903
8904# qhasm: xmm1 ^= xmm5
8905# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
8906# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
8907pxor %xmm7,%xmm4
8908
8909# qhasm: xmm2 = xmm6
8910# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
8911# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
8912movdqa %xmm3,%xmm0
8913
8914# qhasm: xmm2 ^= xmm5
8915# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
8916# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
8917pxor %xmm7,%xmm0
8918
8919# qhasm: xmm2 &= xmm0
8920# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
8921# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
8922pand %xmm6,%xmm0
8923
8924# qhasm: xmm1 ^= xmm2
8925# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
8926# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
8927pxor %xmm0,%xmm4
8928
8929# qhasm: xmm6 ^= xmm2
8930# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
8931# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
8932pxor %xmm0,%xmm3
8933
8934# qhasm: xmm6 &= xmm7
8935# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
8936# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
8937pand %xmm5,%xmm3
8938
8939# qhasm: xmm6 ^= xmm4
8940# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
8941# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
8942pxor %xmm2,%xmm3
8943
8944# qhasm: xmm4 = xmm14
8945# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
8946# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
8947movdqa %xmm14,%xmm0
8948
8949# qhasm: xmm0 = xmm13
8950# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
8951# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
8952movdqa %xmm13,%xmm1
8953
8954# qhasm: xmm2 = xmm7
8955# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
8956# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
8957movdqa %xmm5,%xmm2
8958
8959# qhasm: xmm2 ^= xmm6
8960# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
8961# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
8962pxor %xmm3,%xmm2
8963
8964# qhasm: xmm2 &= xmm14
8965# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
8966# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
8967pand %xmm14,%xmm2
8968
8969# qhasm: xmm14 ^= xmm13
8970# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
8971# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
8972pxor %xmm13,%xmm14
8973
8974# qhasm: xmm14 &= xmm6
8975# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
8976# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
8977pand %xmm3,%xmm14
8978
8979# qhasm: xmm13 &= xmm7
8980# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
8981# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
8982pand %xmm5,%xmm13
8983
8984# qhasm: xmm14 ^= xmm13
8985# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
8986# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
8987pxor %xmm13,%xmm14
8988
8989# qhasm: xmm13 ^= xmm2
8990# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
8991# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
8992pxor %xmm2,%xmm13
8993
8994# qhasm: xmm4 ^= xmm8
8995# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
8996# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
8997pxor %xmm8,%xmm0
8998
8999# qhasm: xmm0 ^= xmm11
9000# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
9001# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
9002pxor %xmm11,%xmm1
9003
9004# qhasm: xmm7 ^= xmm5
9005# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
9006# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
9007pxor %xmm7,%xmm5
9008
9009# qhasm: xmm6 ^= xmm1
9010# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
9011# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
9012pxor %xmm4,%xmm3
9013
9014# qhasm: xmm3 = xmm7
9015# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
9016# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
9017movdqa %xmm5,%xmm2
9018
9019# qhasm: xmm3 ^= xmm6
9020# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
9021# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
9022pxor %xmm3,%xmm2
9023
9024# qhasm: xmm3 &= xmm4
9025# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
9026# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
9027pand %xmm0,%xmm2
9028
9029# qhasm: xmm4 ^= xmm0
9030# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
9031# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
9032pxor %xmm1,%xmm0
9033
9034# qhasm: xmm4 &= xmm6
9035# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
9036# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
9037pand %xmm3,%xmm0
9038
9039# qhasm: xmm0 &= xmm7
9040# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
9041# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
9042pand %xmm5,%xmm1
9043
9044# qhasm: xmm0 ^= xmm4
9045# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
9046# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
9047pxor %xmm0,%xmm1
9048
9049# qhasm: xmm4 ^= xmm3
9050# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
9051# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
9052pxor %xmm2,%xmm0
9053
9054# qhasm: xmm2 = xmm5
9055# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
9056# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
9057movdqa %xmm7,%xmm2
9058
9059# qhasm: xmm2 ^= xmm1
9060# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
9061# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
9062pxor %xmm4,%xmm2
9063
9064# qhasm: xmm2 &= xmm8
9065# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
9066# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
9067pand %xmm8,%xmm2
9068
9069# qhasm: xmm8 ^= xmm11
9070# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
9071# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
9072pxor %xmm11,%xmm8
9073
9074# qhasm: xmm8 &= xmm1
9075# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
9076# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
9077pand %xmm4,%xmm8
9078
9079# qhasm: xmm11 &= xmm5
9080# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
9081# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
9082pand %xmm7,%xmm11
9083
9084# qhasm: xmm8 ^= xmm11
9085# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
9086# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
9087pxor %xmm11,%xmm8
9088
9089# qhasm: xmm11 ^= xmm2
9090# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
9091# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
9092pxor %xmm2,%xmm11
9093
9094# qhasm: xmm14 ^= xmm4
9095# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
9096# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
9097pxor %xmm0,%xmm14
9098
9099# qhasm: xmm8 ^= xmm4
9100# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
9101# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
9102pxor %xmm0,%xmm8
9103
9104# qhasm: xmm13 ^= xmm0
9105# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
9106# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
9107pxor %xmm1,%xmm13
9108
9109# qhasm: xmm11 ^= xmm0
9110# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
9111# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
9112pxor %xmm1,%xmm11
9113
9114# qhasm: xmm4 = xmm15
9115# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
9116# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
9117movdqa %xmm15,%xmm0
9118
9119# qhasm: xmm0 = xmm9
9120# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
9121# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
9122movdqa %xmm9,%xmm1
9123
9124# qhasm: xmm4 ^= xmm12
9125# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
9126# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
9127pxor %xmm12,%xmm0
9128
9129# qhasm: xmm0 ^= xmm10
9130# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
9131# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
9132pxor %xmm10,%xmm1
9133
9134# qhasm: xmm3 = xmm7
9135# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
9136# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
9137movdqa %xmm5,%xmm2
9138
9139# qhasm: xmm3 ^= xmm6
9140# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
9141# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
9142pxor %xmm3,%xmm2
9143
9144# qhasm: xmm3 &= xmm4
9145# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
9146# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
9147pand %xmm0,%xmm2
9148
9149# qhasm: xmm4 ^= xmm0
9150# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
9151# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
9152pxor %xmm1,%xmm0
9153
9154# qhasm: xmm4 &= xmm6
9155# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
9156# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
9157pand %xmm3,%xmm0
9158
9159# qhasm: xmm0 &= xmm7
9160# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
9161# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
9162pand %xmm5,%xmm1
9163
9164# qhasm: xmm0 ^= xmm4
9165# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
9166# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
9167pxor %xmm0,%xmm1
9168
9169# qhasm: xmm4 ^= xmm3
9170# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
9171# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
9172pxor %xmm2,%xmm0
9173
9174# qhasm: xmm2 = xmm5
9175# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
9176# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
9177movdqa %xmm7,%xmm2
9178
9179# qhasm: xmm2 ^= xmm1
9180# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
9181# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
9182pxor %xmm4,%xmm2
9183
9184# qhasm: xmm2 &= xmm12
9185# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
9186# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
9187pand %xmm12,%xmm2
9188
9189# qhasm: xmm12 ^= xmm10
9190# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
9191# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
9192pxor %xmm10,%xmm12
9193
9194# qhasm: xmm12 &= xmm1
9195# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
9196# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
9197pand %xmm4,%xmm12
9198
9199# qhasm: xmm10 &= xmm5
9200# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
9201# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
9202pand %xmm7,%xmm10
9203
9204# qhasm: xmm12 ^= xmm10
9205# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
9206# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
9207pxor %xmm10,%xmm12
9208
9209# qhasm: xmm10 ^= xmm2
9210# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
9211# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
9212pxor %xmm2,%xmm10
9213
9214# qhasm: xmm7 ^= xmm5
9215# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
9216# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
9217pxor %xmm7,%xmm5
9218
9219# qhasm: xmm6 ^= xmm1
9220# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
9221# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
9222pxor %xmm4,%xmm3
9223
9224# qhasm: xmm3 = xmm7
9225# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
9226# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
9227movdqa %xmm5,%xmm2
9228
9229# qhasm: xmm3 ^= xmm6
9230# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
9231# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
9232pxor %xmm3,%xmm2
9233
9234# qhasm: xmm3 &= xmm15
9235# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
9236# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
9237pand %xmm15,%xmm2
9238
9239# qhasm: xmm15 ^= xmm9
9240# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
9241# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
9242pxor %xmm9,%xmm15
9243
9244# qhasm: xmm15 &= xmm6
9245# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
9246# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
9247pand %xmm3,%xmm15
9248
9249# qhasm: xmm9 &= xmm7
9250# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
9251# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
9252pand %xmm5,%xmm9
9253
9254# qhasm: xmm15 ^= xmm9
9255# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
9256# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
9257pxor %xmm9,%xmm15
9258
9259# qhasm: xmm9 ^= xmm3
9260# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
9261# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
9262pxor %xmm2,%xmm9
9263
9264# qhasm: xmm15 ^= xmm4
9265# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
9266# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
9267pxor %xmm0,%xmm15
9268
9269# qhasm: xmm12 ^= xmm4
9270# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
9271# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
9272pxor %xmm0,%xmm12
9273
9274# qhasm: xmm9 ^= xmm0
9275# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
9276# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
9277pxor %xmm1,%xmm9
9278
9279# qhasm: xmm10 ^= xmm0
9280# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
9281# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
9282pxor %xmm1,%xmm10
9283
9284# qhasm: xmm15 ^= xmm8
9285# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
9286# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
9287pxor %xmm8,%xmm15
9288
9289# qhasm: xmm9 ^= xmm14
9290# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
9291# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
9292pxor %xmm14,%xmm9
9293
9294# qhasm: xmm12 ^= xmm15
9295# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
9296# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
9297pxor %xmm15,%xmm12
9298
9299# qhasm: xmm14 ^= xmm8
9300# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
9301# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
9302pxor %xmm8,%xmm14
9303
9304# qhasm: xmm8 ^= xmm9
9305# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
9306# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
9307pxor %xmm9,%xmm8
9308
9309# qhasm: xmm9 ^= xmm13
9310# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
9311# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
9312pxor %xmm13,%xmm9
9313
9314# qhasm: xmm13 ^= xmm10
9315# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
9316# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
9317pxor %xmm10,%xmm13
9318
9319# qhasm: xmm12 ^= xmm13
9320# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
9321# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
9322pxor %xmm13,%xmm12
9323
9324# qhasm: xmm10 ^= xmm11
9325# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
9326# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
9327pxor %xmm11,%xmm10
9328
9329# qhasm: xmm11 ^= xmm13
9330# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
9331# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
9332pxor %xmm13,%xmm11
9333
9334# qhasm: xmm14 ^= xmm11
9335# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
9336# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
9337pxor %xmm11,%xmm14
9338
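# MixColumns for this round, same pattern as before: $0x93 one-word
# rotations of each slice, followed by XOR accumulation.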
9339# qhasm: xmm0 = shuffle dwords of xmm8 by 0x93
9340# asm 1: pshufd $0x93,<xmm8=int6464#9,>xmm0=int6464#1
9341# asm 2: pshufd $0x93,<xmm8=%xmm8,>xmm0=%xmm0
9342pshufd $0x93,%xmm8,%xmm0
9343
9344# qhasm: xmm1 = shuffle dwords of xmm9 by 0x93
9345# asm 1: pshufd $0x93,<xmm9=int6464#10,>xmm1=int6464#2
9346# asm 2: pshufd $0x93,<xmm9=%xmm9,>xmm1=%xmm1
9347pshufd $0x93,%xmm9,%xmm1
9348
9349# qhasm: xmm2 = shuffle dwords of xmm12 by 0x93
9350# asm 1: pshufd $0x93,<xmm12=int6464#13,>xmm2=int6464#3
9351# asm 2: pshufd $0x93,<xmm12=%xmm12,>xmm2=%xmm2
9352pshufd $0x93,%xmm12,%xmm2
9353
9354# qhasm: xmm3 = shuffle dwords of xmm14 by 0x93
9355# asm 1: pshufd $0x93,<xmm14=int6464#15,>xmm3=int6464#4
9356# asm 2: pshufd $0x93,<xmm14=%xmm14,>xmm3=%xmm3
9357pshufd $0x93,%xmm14,%xmm3
9358
9359# qhasm: xmm4 = shuffle dwords of xmm11 by 0x93
9360# asm 1: pshufd $0x93,<xmm11=int6464#12,>xmm4=int6464#5
9361# asm 2: pshufd $0x93,<xmm11=%xmm11,>xmm4=%xmm4
9362pshufd $0x93,%xmm11,%xmm4
9363
9364# qhasm: xmm5 = shuffle dwords of xmm15 by 0x93
9365# asm 1: pshufd $0x93,<xmm15=int6464#16,>xmm5=int6464#6
9366# asm 2: pshufd $0x93,<xmm15=%xmm15,>xmm5=%xmm5
9367pshufd $0x93,%xmm15,%xmm5
9368
9369# qhasm: xmm6 = shuffle dwords of xmm10 by 0x93
9370# asm 1: pshufd $0x93,<xmm10=int6464#11,>xmm6=int6464#7
9371# asm 2: pshufd $0x93,<xmm10=%xmm10,>xmm6=%xmm6
9372pshufd $0x93,%xmm10,%xmm6
9373
9374# qhasm: xmm7 = shuffle dwords of xmm13 by 0x93
9375# asm 1: pshufd $0x93,<xmm13=int6464#14,>xmm7=int6464#8
9376# asm 2: pshufd $0x93,<xmm13=%xmm13,>xmm7=%xmm7
9377pshufd $0x93,%xmm13,%xmm7
9378
9379# qhasm: xmm8 ^= xmm0
9380# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
9381# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
9382pxor %xmm0,%xmm8
9383
9384# qhasm: xmm9 ^= xmm1
9385# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
9386# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
9387pxor %xmm1,%xmm9
9388
9389# qhasm: xmm12 ^= xmm2
9390# asm 1: pxor <xmm2=int6464#3,<xmm12=int6464#13
9391# asm 2: pxor <xmm2=%xmm2,<xmm12=%xmm12
9392pxor %xmm2,%xmm12
9393
9394# qhasm: xmm14 ^= xmm3
9395# asm 1: pxor <xmm3=int6464#4,<xmm14=int6464#15
9396# asm 2: pxor <xmm3=%xmm3,<xmm14=%xmm14
9397pxor %xmm3,%xmm14
9398
9399# qhasm: xmm11 ^= xmm4
9400# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
9401# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
9402pxor %xmm4,%xmm11
9403
9404# qhasm: xmm15 ^= xmm5
9405# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
9406# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
9407pxor %xmm5,%xmm15
9408
9409# qhasm: xmm10 ^= xmm6
9410# asm 1: pxor <xmm6=int6464#7,<xmm10=int6464#11
9411# asm 2: pxor <xmm6=%xmm6,<xmm10=%xmm10
9412pxor %xmm6,%xmm10
9413
9414# qhasm: xmm13 ^= xmm7
9415# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
9416# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
9417pxor %xmm7,%xmm13
9418
9419# qhasm: xmm0 ^= xmm13
9420# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
9421# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
9422pxor %xmm13,%xmm0
9423
9424# qhasm: xmm1 ^= xmm8
9425# asm 1: pxor <xmm8=int6464#9,<xmm1=int6464#2
9426# asm 2: pxor <xmm8=%xmm8,<xmm1=%xmm1
9427pxor %xmm8,%xmm1
9428
9429# qhasm: xmm2 ^= xmm9
9430# asm 1: pxor <xmm9=int6464#10,<xmm2=int6464#3
9431# asm 2: pxor <xmm9=%xmm9,<xmm2=%xmm2
9432pxor %xmm9,%xmm2
9433
9434# qhasm: xmm1 ^= xmm13
9435# asm 1: pxor <xmm13=int6464#14,<xmm1=int6464#2
9436# asm 2: pxor <xmm13=%xmm13,<xmm1=%xmm1
9437pxor %xmm13,%xmm1
9438
9439# qhasm: xmm3 ^= xmm12
9440# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
9441# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
9442pxor %xmm12,%xmm3
9443
9444# qhasm: xmm4 ^= xmm14
9445# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#5
9446# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm4
9447pxor %xmm14,%xmm4
9448
9449# qhasm: xmm5 ^= xmm11
9450# asm 1: pxor <xmm11=int6464#12,<xmm5=int6464#6
9451# asm 2: pxor <xmm11=%xmm11,<xmm5=%xmm5
9452pxor %xmm11,%xmm5
9453
9454# qhasm: xmm3 ^= xmm13
9455# asm 1: pxor <xmm13=int6464#14,<xmm3=int6464#4
9456# asm 2: pxor <xmm13=%xmm13,<xmm3=%xmm3
9457pxor %xmm13,%xmm3
9458
9459# qhasm: xmm6 ^= xmm15
9460# asm 1: pxor <xmm15=int6464#16,<xmm6=int6464#7
9461# asm 2: pxor <xmm15=%xmm15,<xmm6=%xmm6
9462pxor %xmm15,%xmm6
9463
9464# qhasm: xmm7 ^= xmm10
9465# asm 1: pxor <xmm10=int6464#11,<xmm7=int6464#8
9466# asm 2: pxor <xmm10=%xmm10,<xmm7=%xmm7
9467pxor %xmm10,%xmm7
9468
9469# qhasm: xmm4 ^= xmm13
9470# asm 1: pxor <xmm13=int6464#14,<xmm4=int6464#5
9471# asm 2: pxor <xmm13=%xmm13,<xmm4=%xmm4
9472pxor %xmm13,%xmm4
9473
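# MixColumns, second half: pshufd $0x4E swaps the 64-bit halves of each
# bit plane (a rotation by two 32-bit words), providing the remaining
# rotated terms folded in by the next pxor chain.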
9474# qhasm: xmm8 = shuffle dwords of xmm8 by 0x4E
9475# asm 1: pshufd $0x4E,<xmm8=int6464#9,>xmm8=int6464#9
9476# asm 2: pshufd $0x4E,<xmm8=%xmm8,>xmm8=%xmm8
9477pshufd $0x4E,%xmm8,%xmm8
9478
9479# qhasm: xmm9 = shuffle dwords of xmm9 by 0x4E
9480# asm 1: pshufd $0x4E,<xmm9=int6464#10,>xmm9=int6464#10
9481# asm 2: pshufd $0x4E,<xmm9=%xmm9,>xmm9=%xmm9
9482pshufd $0x4E,%xmm9,%xmm9
9483
9484# qhasm: xmm12 = shuffle dwords of xmm12 by 0x4E
9485# asm 1: pshufd $0x4E,<xmm12=int6464#13,>xmm12=int6464#13
9486# asm 2: pshufd $0x4E,<xmm12=%xmm12,>xmm12=%xmm12
9487pshufd $0x4E,%xmm12,%xmm12
9488
9489# qhasm: xmm14 = shuffle dwords of xmm14 by 0x4E
9490# asm 1: pshufd $0x4E,<xmm14=int6464#15,>xmm14=int6464#15
9491# asm 2: pshufd $0x4E,<xmm14=%xmm14,>xmm14=%xmm14
9492pshufd $0x4E,%xmm14,%xmm14
9493
9494# qhasm: xmm11 = shuffle dwords of xmm11 by 0x4E
9495# asm 1: pshufd $0x4E,<xmm11=int6464#12,>xmm11=int6464#12
9496# asm 2: pshufd $0x4E,<xmm11=%xmm11,>xmm11=%xmm11
9497pshufd $0x4E,%xmm11,%xmm11
9498
9499# qhasm: xmm15 = shuffle dwords of xmm15 by 0x4E
9500# asm 1: pshufd $0x4E,<xmm15=int6464#16,>xmm15=int6464#16
9501# asm 2: pshufd $0x4E,<xmm15=%xmm15,>xmm15=%xmm15
9502pshufd $0x4E,%xmm15,%xmm15
9503
9504# qhasm: xmm10 = shuffle dwords of xmm10 by 0x4E
9505# asm 1: pshufd $0x4E,<xmm10=int6464#11,>xmm10=int6464#11
9506# asm 2: pshufd $0x4E,<xmm10=%xmm10,>xmm10=%xmm10
9507pshufd $0x4E,%xmm10,%xmm10
9508
9509# qhasm: xmm13 = shuffle dwords of xmm13 by 0x4E
9510# asm 1: pshufd $0x4E,<xmm13=int6464#14,>xmm13=int6464#14
9511# asm 2: pshufd $0x4E,<xmm13=%xmm13,>xmm13=%xmm13
9512pshufd $0x4E,%xmm13,%xmm13
9513
9514# qhasm: xmm0 ^= xmm8
9515# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
9516# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
9517pxor %xmm8,%xmm0
9518
9519# qhasm: xmm1 ^= xmm9
9520# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
9521# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
9522pxor %xmm9,%xmm1
9523
9524# qhasm: xmm2 ^= xmm12
9525# asm 1: pxor <xmm12=int6464#13,<xmm2=int6464#3
9526# asm 2: pxor <xmm12=%xmm12,<xmm2=%xmm2
9527pxor %xmm12,%xmm2
9528
9529# qhasm: xmm3 ^= xmm14
9530# asm 1: pxor <xmm14=int6464#15,<xmm3=int6464#4
9531# asm 2: pxor <xmm14=%xmm14,<xmm3=%xmm3
9532pxor %xmm14,%xmm3
9533
9534# qhasm: xmm4 ^= xmm11
9535# asm 1: pxor <xmm11=int6464#12,<xmm4=int6464#5
9536# asm 2: pxor <xmm11=%xmm11,<xmm4=%xmm4
9537pxor %xmm11,%xmm4
9538
9539# qhasm: xmm5 ^= xmm15
9540# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
9541# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
9542pxor %xmm15,%xmm5
9543
9544# qhasm: xmm6 ^= xmm10
9545# asm 1: pxor <xmm10=int6464#11,<xmm6=int6464#7
9546# asm 2: pxor <xmm10=%xmm10,<xmm6=%xmm6
9547pxor %xmm10,%xmm6
9548
9549# qhasm: xmm7 ^= xmm13
9550# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
9551# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
9552pxor %xmm13,%xmm7
9553
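# AddRoundKey + ShiftRows. The eight 16-byte loads below sit at a
# 128-byte stride from c, consistent with bitsliced round keys stored
# back to back: offsets 1024..1136 would be round key 8 (8 * 128 = 1024),
# one slice per bit plane. pshufb with the SR constant then permutes
# bytes within each plane to realize ShiftRows.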
9554# qhasm: xmm0 ^= *(int128 *)(c + 1024)
9555# asm 1: pxor 1024(<c=int64#5),<xmm0=int6464#1
9556# asm 2: pxor 1024(<c=%r8),<xmm0=%xmm0
9557pxor 1024(%r8),%xmm0
9558
9559# qhasm: shuffle bytes of xmm0 by SR
9560# asm 1: pshufb SR,<xmm0=int6464#1
9561# asm 2: pshufb SR,<xmm0=%xmm0
9562pshufb SR,%xmm0
9563
9564# qhasm: xmm1 ^= *(int128 *)(c + 1040)
9565# asm 1: pxor 1040(<c=int64#5),<xmm1=int6464#2
9566# asm 2: pxor 1040(<c=%r8),<xmm1=%xmm1
9567pxor 1040(%r8),%xmm1
9568
9569# qhasm: shuffle bytes of xmm1 by SR
9570# asm 1: pshufb SR,<xmm1=int6464#2
9571# asm 2: pshufb SR,<xmm1=%xmm1
9572pshufb SR,%xmm1
9573
9574# qhasm: xmm2 ^= *(int128 *)(c + 1056)
9575# asm 1: pxor 1056(<c=int64#5),<xmm2=int6464#3
9576# asm 2: pxor 1056(<c=%r8),<xmm2=%xmm2
9577pxor 1056(%r8),%xmm2
9578
9579# qhasm: shuffle bytes of xmm2 by SR
9580# asm 1: pshufb SR,<xmm2=int6464#3
9581# asm 2: pshufb SR,<xmm2=%xmm2
9582pshufb SR,%xmm2
9583
9584# qhasm: xmm3 ^= *(int128 *)(c + 1072)
9585# asm 1: pxor 1072(<c=int64#5),<xmm3=int6464#4
9586# asm 2: pxor 1072(<c=%r8),<xmm3=%xmm3
9587pxor 1072(%r8),%xmm3
9588
9589# qhasm: shuffle bytes of xmm3 by SR
9590# asm 1: pshufb SR,<xmm3=int6464#4
9591# asm 2: pshufb SR,<xmm3=%xmm3
9592pshufb SR,%xmm3
9593
9594# qhasm: xmm4 ^= *(int128 *)(c + 1088)
9595# asm 1: pxor 1088(<c=int64#5),<xmm4=int6464#5
9596# asm 2: pxor 1088(<c=%r8),<xmm4=%xmm4
9597pxor 1088(%r8),%xmm4
9598
9599# qhasm: shuffle bytes of xmm4 by SR
9600# asm 1: pshufb SR,<xmm4=int6464#5
9601# asm 2: pshufb SR,<xmm4=%xmm4
9602pshufb SR,%xmm4
9603
9604# qhasm: xmm5 ^= *(int128 *)(c + 1104)
9605# asm 1: pxor 1104(<c=int64#5),<xmm5=int6464#6
9606# asm 2: pxor 1104(<c=%r8),<xmm5=%xmm5
9607pxor 1104(%r8),%xmm5
9608
9609# qhasm: shuffle bytes of xmm5 by SR
9610# asm 1: pshufb SR,<xmm5=int6464#6
9611# asm 2: pshufb SR,<xmm5=%xmm5
9612pshufb SR,%xmm5
9613
9614# qhasm: xmm6 ^= *(int128 *)(c + 1120)
9615# asm 1: pxor 1120(<c=int64#5),<xmm6=int6464#7
9616# asm 2: pxor 1120(<c=%r8),<xmm6=%xmm6
9617pxor 1120(%r8),%xmm6
9618
9619# qhasm: shuffle bytes of xmm6 by SR
9620# asm 1: pshufb SR,<xmm6=int6464#7
9621# asm 2: pshufb SR,<xmm6=%xmm6
9622pshufb SR,%xmm6
9623
9624# qhasm: xmm7 ^= *(int128 *)(c + 1136)
9625# asm 1: pxor 1136(<c=int64#5),<xmm7=int6464#8
9626# asm 2: pxor 1136(<c=%r8),<xmm7=%xmm7
9627pxor 1136(%r8),%xmm7
9628
9629# qhasm: shuffle bytes of xmm7 by SR
9630# asm 1: pshufb SR,<xmm7=int6464#8
9631# asm 2: pshufb SR,<xmm7=%xmm7
9632pshufb SR,%xmm7
9633
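# Bitsliced S-box. The long pxor/pand/por network that follows evaluates
# all 128 S-box lookups of the eight parallel blocks at once as a Boolean
# circuit over the bit planes xmm0..xmm7, apparently computing the
# GF(2^8) inversion through a tower-field (composite-field) decomposition.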
9634# qhasm: xmm5 ^= xmm6
9635# asm 1: pxor <xmm6=int6464#7,<xmm5=int6464#6
9636# asm 2: pxor <xmm6=%xmm6,<xmm5=%xmm5
9637pxor %xmm6,%xmm5
9638
9639# qhasm: xmm2 ^= xmm1
9640# asm 1: pxor <xmm1=int6464#2,<xmm2=int6464#3
9641# asm 2: pxor <xmm1=%xmm1,<xmm2=%xmm2
9642pxor %xmm1,%xmm2
9643
9644# qhasm: xmm5 ^= xmm0
9645# asm 1: pxor <xmm0=int6464#1,<xmm5=int6464#6
9646# asm 2: pxor <xmm0=%xmm0,<xmm5=%xmm5
9647pxor %xmm0,%xmm5
9648
9649# qhasm: xmm6 ^= xmm2
9650# asm 1: pxor <xmm2=int6464#3,<xmm6=int6464#7
9651# asm 2: pxor <xmm2=%xmm2,<xmm6=%xmm6
9652pxor %xmm2,%xmm6
9653
9654# qhasm: xmm3 ^= xmm0
9655# asm 1: pxor <xmm0=int6464#1,<xmm3=int6464#4
9656# asm 2: pxor <xmm0=%xmm0,<xmm3=%xmm3
9657pxor %xmm0,%xmm3
9658
9659# qhasm: xmm6 ^= xmm3
9660# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
9661# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
9662pxor %xmm3,%xmm6
9663
9664# qhasm: xmm3 ^= xmm7
9665# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#4
9666# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm3
9667pxor %xmm7,%xmm3
9668
9669# qhasm: xmm3 ^= xmm4
9670# asm 1: pxor <xmm4=int6464#5,<xmm3=int6464#4
9671# asm 2: pxor <xmm4=%xmm4,<xmm3=%xmm3
9672pxor %xmm4,%xmm3
9673
9674# qhasm: xmm7 ^= xmm5
9675# asm 1: pxor <xmm5=int6464#6,<xmm7=int6464#8
9676# asm 2: pxor <xmm5=%xmm5,<xmm7=%xmm7
9677pxor %xmm5,%xmm7
9678
9679# qhasm: xmm3 ^= xmm1
9680# asm 1: pxor <xmm1=int6464#2,<xmm3=int6464#4
9681# asm 2: pxor <xmm1=%xmm1,<xmm3=%xmm3
9682pxor %xmm1,%xmm3
9683
9684# qhasm: xmm4 ^= xmm5
9685# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
9686# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
9687pxor %xmm5,%xmm4
9688
9689# qhasm: xmm2 ^= xmm7
9690# asm 1: pxor <xmm7=int6464#8,<xmm2=int6464#3
9691# asm 2: pxor <xmm7=%xmm7,<xmm2=%xmm2
9692pxor %xmm7,%xmm2
9693
9694# qhasm: xmm1 ^= xmm5
9695# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
9696# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
9697pxor %xmm5,%xmm1
9698
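# Nonlinear core of the S-box circuit: xmm8..xmm15 act as scratch space
# for the shared AND/OR products of the inversion. Note that the qhasm
# register names and the physical xmm registers diverge from here on, so
# the "asm 2" comments are the reliable map of which register holds what.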
9699# qhasm: xmm11 = xmm7
9700# asm 1: movdqa <xmm7=int6464#8,>xmm11=int6464#9
9701# asm 2: movdqa <xmm7=%xmm7,>xmm11=%xmm8
9702movdqa %xmm7,%xmm8
9703
9704# qhasm: xmm10 = xmm1
9705# asm 1: movdqa <xmm1=int6464#2,>xmm10=int6464#10
9706# asm 2: movdqa <xmm1=%xmm1,>xmm10=%xmm9
9707movdqa %xmm1,%xmm9
9708
9709# qhasm: xmm9 = xmm5
9710# asm 1: movdqa <xmm5=int6464#6,>xmm9=int6464#11
9711# asm 2: movdqa <xmm5=%xmm5,>xmm9=%xmm10
9712movdqa %xmm5,%xmm10
9713
9714# qhasm: xmm13 = xmm2
9715# asm 1: movdqa <xmm2=int6464#3,>xmm13=int6464#12
9716# asm 2: movdqa <xmm2=%xmm2,>xmm13=%xmm11
9717movdqa %xmm2,%xmm11
9718
9719# qhasm: xmm12 = xmm6
9720# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#13
9721# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm12
9722movdqa %xmm6,%xmm12
9723
9724# qhasm: xmm11 ^= xmm4
9725# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#9
9726# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm8
9727pxor %xmm4,%xmm8
9728
9729# qhasm: xmm10 ^= xmm2
9730# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#10
9731# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm9
9732pxor %xmm2,%xmm9
9733
9734# qhasm: xmm9 ^= xmm3
9735# asm 1: pxor <xmm3=int6464#4,<xmm9=int6464#11
9736# asm 2: pxor <xmm3=%xmm3,<xmm9=%xmm10
9737pxor %xmm3,%xmm10
9738
9739# qhasm: xmm13 ^= xmm4
9740# asm 1: pxor <xmm4=int6464#5,<xmm13=int6464#12
9741# asm 2: pxor <xmm4=%xmm4,<xmm13=%xmm11
9742pxor %xmm4,%xmm11
9743
9744# qhasm: xmm12 ^= xmm0
9745# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
9746# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
9747pxor %xmm0,%xmm12
9748
9749# qhasm: xmm14 = xmm11
9750# asm 1: movdqa <xmm11=int6464#9,>xmm14=int6464#14
9751# asm 2: movdqa <xmm11=%xmm8,>xmm14=%xmm13
9752movdqa %xmm8,%xmm13
9753
9754# qhasm: xmm8 = xmm10
9755# asm 1: movdqa <xmm10=int6464#10,>xmm8=int6464#15
9756# asm 2: movdqa <xmm10=%xmm9,>xmm8=%xmm14
9757movdqa %xmm9,%xmm14
9758
9759# qhasm: xmm15 = xmm11
9760# asm 1: movdqa <xmm11=int6464#9,>xmm15=int6464#16
9761# asm 2: movdqa <xmm11=%xmm8,>xmm15=%xmm15
9762movdqa %xmm8,%xmm15
9763
9764# qhasm: xmm10 |= xmm9
9765# asm 1: por <xmm9=int6464#11,<xmm10=int6464#10
9766# asm 2: por <xmm9=%xmm10,<xmm10=%xmm9
9767por %xmm10,%xmm9
9768
9769# qhasm: xmm11 |= xmm12
9770# asm 1: por <xmm12=int6464#13,<xmm11=int6464#9
9771# asm 2: por <xmm12=%xmm12,<xmm11=%xmm8
9772por %xmm12,%xmm8
9773
9774# qhasm: xmm15 ^= xmm8
9775# asm 1: pxor <xmm8=int6464#15,<xmm15=int6464#16
9776# asm 2: pxor <xmm8=%xmm14,<xmm15=%xmm15
9777pxor %xmm14,%xmm15
9778
9779# qhasm: xmm14 &= xmm12
9780# asm 1: pand <xmm12=int6464#13,<xmm14=int6464#14
9781# asm 2: pand <xmm12=%xmm12,<xmm14=%xmm13
9782pand %xmm12,%xmm13
9783
9784# qhasm: xmm8 &= xmm9
9785# asm 1: pand <xmm9=int6464#11,<xmm8=int6464#15
9786# asm 2: pand <xmm9=%xmm10,<xmm8=%xmm14
9787pand %xmm10,%xmm14
9788
9789# qhasm: xmm12 ^= xmm9
9790# asm 1: pxor <xmm9=int6464#11,<xmm12=int6464#13
9791# asm 2: pxor <xmm9=%xmm10,<xmm12=%xmm12
9792pxor %xmm10,%xmm12
9793
9794# qhasm: xmm15 &= xmm12
9795# asm 1: pand <xmm12=int6464#13,<xmm15=int6464#16
9796# asm 2: pand <xmm12=%xmm12,<xmm15=%xmm15
9797pand %xmm12,%xmm15
9798
9799# qhasm: xmm12 = xmm3
9800# asm 1: movdqa <xmm3=int6464#4,>xmm12=int6464#11
9801# asm 2: movdqa <xmm3=%xmm3,>xmm12=%xmm10
9802movdqa %xmm3,%xmm10
9803
9804# qhasm: xmm12 ^= xmm0
9805# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#11
9806# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm10
9807pxor %xmm0,%xmm10
9808
9809# qhasm: xmm13 &= xmm12
9810# asm 1: pand <xmm12=int6464#11,<xmm13=int6464#12
9811# asm 2: pand <xmm12=%xmm10,<xmm13=%xmm11
9812pand %xmm10,%xmm11
9813
9814# qhasm: xmm11 ^= xmm13
9815# asm 1: pxor <xmm13=int6464#12,<xmm11=int6464#9
9816# asm 2: pxor <xmm13=%xmm11,<xmm11=%xmm8
9817pxor %xmm11,%xmm8
9818
9819# qhasm: xmm10 ^= xmm13
9820# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
9821# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
9822pxor %xmm11,%xmm9
9823
9824# qhasm: xmm13 = xmm7
9825# asm 1: movdqa <xmm7=int6464#8,>xmm13=int6464#11
9826# asm 2: movdqa <xmm7=%xmm7,>xmm13=%xmm10
9827movdqa %xmm7,%xmm10
9828
9829# qhasm: xmm13 ^= xmm1
9830# asm 1: pxor <xmm1=int6464#2,<xmm13=int6464#11
9831# asm 2: pxor <xmm1=%xmm1,<xmm13=%xmm10
9832pxor %xmm1,%xmm10
9833
9834# qhasm: xmm12 = xmm5
9835# asm 1: movdqa <xmm5=int6464#6,>xmm12=int6464#12
9836# asm 2: movdqa <xmm5=%xmm5,>xmm12=%xmm11
9837movdqa %xmm5,%xmm11
9838
9839# qhasm: xmm9 = xmm13
9840# asm 1: movdqa <xmm13=int6464#11,>xmm9=int6464#13
9841# asm 2: movdqa <xmm13=%xmm10,>xmm9=%xmm12
9842movdqa %xmm10,%xmm12
9843
9844# qhasm: xmm12 ^= xmm6
9845# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#12
9846# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm11
9847pxor %xmm6,%xmm11
9848
9849# qhasm: xmm9 |= xmm12
9850# asm 1: por <xmm12=int6464#12,<xmm9=int6464#13
9851# asm 2: por <xmm12=%xmm11,<xmm9=%xmm12
9852por %xmm11,%xmm12
9853
9854# qhasm: xmm13 &= xmm12
9855# asm 1: pand <xmm12=int6464#12,<xmm13=int6464#11
9856# asm 2: pand <xmm12=%xmm11,<xmm13=%xmm10
9857pand %xmm11,%xmm10
9858
9859# qhasm: xmm8 ^= xmm13
9860# asm 1: pxor <xmm13=int6464#11,<xmm8=int6464#15
9861# asm 2: pxor <xmm13=%xmm10,<xmm8=%xmm14
9862pxor %xmm10,%xmm14
9863
9864# qhasm: xmm11 ^= xmm15
9865# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#9
9866# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm8
9867pxor %xmm15,%xmm8
9868
9869# qhasm: xmm10 ^= xmm14
9870# asm 1: pxor <xmm14=int6464#14,<xmm10=int6464#10
9871# asm 2: pxor <xmm14=%xmm13,<xmm10=%xmm9
9872pxor %xmm13,%xmm9
9873
9874# qhasm: xmm9 ^= xmm15
9875# asm 1: pxor <xmm15=int6464#16,<xmm9=int6464#13
9876# asm 2: pxor <xmm15=%xmm15,<xmm9=%xmm12
9877pxor %xmm15,%xmm12
9878
9879# qhasm: xmm8 ^= xmm14
9880# asm 1: pxor <xmm14=int6464#14,<xmm8=int6464#15
9881# asm 2: pxor <xmm14=%xmm13,<xmm8=%xmm14
9882pxor %xmm13,%xmm14
9883
9884# qhasm: xmm9 ^= xmm14
9885# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
9886# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
9887pxor %xmm13,%xmm12
9888
9889# qhasm: xmm12 = xmm2
9890# asm 1: movdqa <xmm2=int6464#3,>xmm12=int6464#11
9891# asm 2: movdqa <xmm2=%xmm2,>xmm12=%xmm10
9892movdqa %xmm2,%xmm10
9893
9894# qhasm: xmm13 = xmm4
9895# asm 1: movdqa <xmm4=int6464#5,>xmm13=int6464#12
9896# asm 2: movdqa <xmm4=%xmm4,>xmm13=%xmm11
9897movdqa %xmm4,%xmm11
9898
9899# qhasm: xmm14 = xmm1
9900# asm 1: movdqa <xmm1=int6464#2,>xmm14=int6464#14
9901# asm 2: movdqa <xmm1=%xmm1,>xmm14=%xmm13
9902movdqa %xmm1,%xmm13
9903
9904# qhasm: xmm15 = xmm7
9905# asm 1: movdqa <xmm7=int6464#8,>xmm15=int6464#16
9906# asm 2: movdqa <xmm7=%xmm7,>xmm15=%xmm15
9907movdqa %xmm7,%xmm15
9908
9909# qhasm: xmm12 &= xmm3
9910# asm 1: pand <xmm3=int6464#4,<xmm12=int6464#11
9911# asm 2: pand <xmm3=%xmm3,<xmm12=%xmm10
9912pand %xmm3,%xmm10
9913
9914# qhasm: xmm13 &= xmm0
9915# asm 1: pand <xmm0=int6464#1,<xmm13=int6464#12
9916# asm 2: pand <xmm0=%xmm0,<xmm13=%xmm11
9917pand %xmm0,%xmm11
9918
9919# qhasm: xmm14 &= xmm5
9920# asm 1: pand <xmm5=int6464#6,<xmm14=int6464#14
9921# asm 2: pand <xmm5=%xmm5,<xmm14=%xmm13
9922pand %xmm5,%xmm13
9923
9924# qhasm: xmm15 |= xmm6
9925# asm 1: por <xmm6=int6464#7,<xmm15=int6464#16
9926# asm 2: por <xmm6=%xmm6,<xmm15=%xmm15
9927por %xmm6,%xmm15
9928
9929# qhasm: xmm11 ^= xmm12
9930# asm 1: pxor <xmm12=int6464#11,<xmm11=int6464#9
9931# asm 2: pxor <xmm12=%xmm10,<xmm11=%xmm8
9932pxor %xmm10,%xmm8
9933
9934# qhasm: xmm10 ^= xmm13
9935# asm 1: pxor <xmm13=int6464#12,<xmm10=int6464#10
9936# asm 2: pxor <xmm13=%xmm11,<xmm10=%xmm9
9937pxor %xmm11,%xmm9
9938
9939# qhasm: xmm9 ^= xmm14
9940# asm 1: pxor <xmm14=int6464#14,<xmm9=int6464#13
9941# asm 2: pxor <xmm14=%xmm13,<xmm9=%xmm12
9942pxor %xmm13,%xmm12
9943
9944# qhasm: xmm8 ^= xmm15
9945# asm 1: pxor <xmm15=int6464#16,<xmm8=int6464#15
9946# asm 2: pxor <xmm15=%xmm15,<xmm8=%xmm14
9947pxor %xmm15,%xmm14
9948
9949# qhasm: xmm12 = xmm11
9950# asm 1: movdqa <xmm11=int6464#9,>xmm12=int6464#11
9951# asm 2: movdqa <xmm11=%xmm8,>xmm12=%xmm10
9952movdqa %xmm8,%xmm10
9953
9954# qhasm: xmm12 ^= xmm10
9955# asm 1: pxor <xmm10=int6464#10,<xmm12=int6464#11
9956# asm 2: pxor <xmm10=%xmm9,<xmm12=%xmm10
9957pxor %xmm9,%xmm10
9958
9959# qhasm: xmm11 &= xmm9
9960# asm 1: pand <xmm9=int6464#13,<xmm11=int6464#9
9961# asm 2: pand <xmm9=%xmm12,<xmm11=%xmm8
9962pand %xmm12,%xmm8
9963
9964# qhasm: xmm14 = xmm8
9965# asm 1: movdqa <xmm8=int6464#15,>xmm14=int6464#12
9966# asm 2: movdqa <xmm8=%xmm14,>xmm14=%xmm11
9967movdqa %xmm14,%xmm11
9968
9969# qhasm: xmm14 ^= xmm11
9970# asm 1: pxor <xmm11=int6464#9,<xmm14=int6464#12
9971# asm 2: pxor <xmm11=%xmm8,<xmm14=%xmm11
9972pxor %xmm8,%xmm11
9973
9974# qhasm: xmm15 = xmm12
9975# asm 1: movdqa <xmm12=int6464#11,>xmm15=int6464#14
9976# asm 2: movdqa <xmm12=%xmm10,>xmm15=%xmm13
9977movdqa %xmm10,%xmm13
9978
9979# qhasm: xmm15 &= xmm14
9980# asm 1: pand <xmm14=int6464#12,<xmm15=int6464#14
9981# asm 2: pand <xmm14=%xmm11,<xmm15=%xmm13
9982pand %xmm11,%xmm13
9983
9984# qhasm: xmm15 ^= xmm10
9985# asm 1: pxor <xmm10=int6464#10,<xmm15=int6464#14
9986# asm 2: pxor <xmm10=%xmm9,<xmm15=%xmm13
9987pxor %xmm9,%xmm13
9988
9989# qhasm: xmm13 = xmm9
9990# asm 1: movdqa <xmm9=int6464#13,>xmm13=int6464#16
9991# asm 2: movdqa <xmm9=%xmm12,>xmm13=%xmm15
9992movdqa %xmm12,%xmm15
9993
9994# qhasm: xmm13 ^= xmm8
9995# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
9996# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
9997pxor %xmm14,%xmm15
9998
9999# qhasm: xmm11 ^= xmm10
10000# asm 1: pxor <xmm10=int6464#10,<xmm11=int6464#9
10001# asm 2: pxor <xmm10=%xmm9,<xmm11=%xmm8
10002pxor %xmm9,%xmm8
10003
10004# qhasm: xmm13 &= xmm11
10005# asm 1: pand <xmm11=int6464#9,<xmm13=int6464#16
10006# asm 2: pand <xmm11=%xmm8,<xmm13=%xmm15
10007pand %xmm8,%xmm15
10008
10009# qhasm: xmm13 ^= xmm8
10010# asm 1: pxor <xmm8=int6464#15,<xmm13=int6464#16
10011# asm 2: pxor <xmm8=%xmm14,<xmm13=%xmm15
10012pxor %xmm14,%xmm15
10013
10014# qhasm: xmm9 ^= xmm13
10015# asm 1: pxor <xmm13=int6464#16,<xmm9=int6464#13
10016# asm 2: pxor <xmm13=%xmm15,<xmm9=%xmm12
10017pxor %xmm15,%xmm12
10018
10019# qhasm: xmm10 = xmm14
10020# asm 1: movdqa <xmm14=int6464#12,>xmm10=int6464#9
10021# asm 2: movdqa <xmm14=%xmm11,>xmm10=%xmm8
10022movdqa %xmm11,%xmm8
10023
10024# qhasm: xmm10 ^= xmm13
10025# asm 1: pxor <xmm13=int6464#16,<xmm10=int6464#9
10026# asm 2: pxor <xmm13=%xmm15,<xmm10=%xmm8
10027pxor %xmm15,%xmm8
10028
10029# qhasm: xmm10 &= xmm8
10030# asm 1: pand <xmm8=int6464#15,<xmm10=int6464#9
10031# asm 2: pand <xmm8=%xmm14,<xmm10=%xmm8
10032pand %xmm14,%xmm8
10033
10034# qhasm: xmm9 ^= xmm10
10035# asm 1: pxor <xmm10=int6464#9,<xmm9=int6464#13
10036# asm 2: pxor <xmm10=%xmm8,<xmm9=%xmm12
10037pxor %xmm8,%xmm12
10038
10039# qhasm: xmm14 ^= xmm10
10040# asm 1: pxor <xmm10=int6464#9,<xmm14=int6464#12
10041# asm 2: pxor <xmm10=%xmm8,<xmm14=%xmm11
10042pxor %xmm8,%xmm11
10043
10044# qhasm: xmm14 &= xmm15
10045# asm 1: pand <xmm15=int6464#14,<xmm14=int6464#12
10046# asm 2: pand <xmm15=%xmm13,<xmm14=%xmm11
10047pand %xmm13,%xmm11
10048
10049# qhasm: xmm14 ^= xmm12
10050# asm 1: pxor <xmm12=int6464#11,<xmm14=int6464#12
10051# asm 2: pxor <xmm12=%xmm10,<xmm14=%xmm11
10052pxor %xmm10,%xmm11
10053
10054# qhasm: xmm12 = xmm6
10055# asm 1: movdqa <xmm6=int6464#7,>xmm12=int6464#9
10056# asm 2: movdqa <xmm6=%xmm6,>xmm12=%xmm8
10057movdqa %xmm6,%xmm8
10058
10059# qhasm: xmm8 = xmm5
10060# asm 1: movdqa <xmm5=int6464#6,>xmm8=int6464#10
10061# asm 2: movdqa <xmm5=%xmm5,>xmm8=%xmm9
10062movdqa %xmm5,%xmm9
10063
10064# qhasm: xmm10 = xmm15
10065# asm 1: movdqa <xmm15=int6464#14,>xmm10=int6464#11
10066# asm 2: movdqa <xmm15=%xmm13,>xmm10=%xmm10
10067movdqa %xmm13,%xmm10
10068
10069# qhasm: xmm10 ^= xmm14
10070# asm 1: pxor <xmm14=int6464#12,<xmm10=int6464#11
10071# asm 2: pxor <xmm14=%xmm11,<xmm10=%xmm10
10072pxor %xmm11,%xmm10
10073
10074# qhasm: xmm10 &= xmm6
10075# asm 1: pand <xmm6=int6464#7,<xmm10=int6464#11
10076# asm 2: pand <xmm6=%xmm6,<xmm10=%xmm10
10077pand %xmm6,%xmm10
10078
10079# qhasm: xmm6 ^= xmm5
10080# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
10081# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
10082pxor %xmm5,%xmm6
10083
10084# qhasm: xmm6 &= xmm14
10085# asm 1: pand <xmm14=int6464#12,<xmm6=int6464#7
10086# asm 2: pand <xmm14=%xmm11,<xmm6=%xmm6
10087pand %xmm11,%xmm6
10088
10089# qhasm: xmm5 &= xmm15
10090# asm 1: pand <xmm15=int6464#14,<xmm5=int6464#6
10091# asm 2: pand <xmm15=%xmm13,<xmm5=%xmm5
10092pand %xmm13,%xmm5
10093
10094# qhasm: xmm6 ^= xmm5
10095# asm 1: pxor <xmm5=int6464#6,<xmm6=int6464#7
10096# asm 2: pxor <xmm5=%xmm5,<xmm6=%xmm6
10097pxor %xmm5,%xmm6
10098
10099# qhasm: xmm5 ^= xmm10
10100# asm 1: pxor <xmm10=int6464#11,<xmm5=int6464#6
10101# asm 2: pxor <xmm10=%xmm10,<xmm5=%xmm5
10102pxor %xmm10,%xmm5
10103
10104# qhasm: xmm12 ^= xmm0
10105# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#9
10106# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm8
10107pxor %xmm0,%xmm8
10108
10109# qhasm: xmm8 ^= xmm3
10110# asm 1: pxor <xmm3=int6464#4,<xmm8=int6464#10
10111# asm 2: pxor <xmm3=%xmm3,<xmm8=%xmm9
10112pxor %xmm3,%xmm9
10113
10114# qhasm: xmm15 ^= xmm13
10115# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
10116# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
10117pxor %xmm15,%xmm13
10118
10119# qhasm: xmm14 ^= xmm9
10120# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
10121# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
10122pxor %xmm12,%xmm11
10123
10124# qhasm: xmm11 = xmm15
10125# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10126# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10127movdqa %xmm13,%xmm10
10128
10129# qhasm: xmm11 ^= xmm14
10130# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10131# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10132pxor %xmm11,%xmm10
10133
10134# qhasm: xmm11 &= xmm12
10135# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
10136# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
10137pand %xmm8,%xmm10
10138
10139# qhasm: xmm12 ^= xmm8
10140# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
10141# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
10142pxor %xmm9,%xmm8
10143
10144# qhasm: xmm12 &= xmm14
10145# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
10146# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
10147pand %xmm11,%xmm8
10148
10149# qhasm: xmm8 &= xmm15
10150# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
10151# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
10152pand %xmm13,%xmm9
10153
10154# qhasm: xmm8 ^= xmm12
10155# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
10156# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
10157pxor %xmm8,%xmm9
10158
10159# qhasm: xmm12 ^= xmm11
10160# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
10161# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
10162pxor %xmm10,%xmm8
10163
10164# qhasm: xmm10 = xmm13
10165# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
10166# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
10167movdqa %xmm15,%xmm10
10168
10169# qhasm: xmm10 ^= xmm9
10170# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
10171# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
10172pxor %xmm12,%xmm10
10173
10174# qhasm: xmm10 &= xmm0
10175# asm 1: pand <xmm0=int6464#1,<xmm10=int6464#11
10176# asm 2: pand <xmm0=%xmm0,<xmm10=%xmm10
10177pand %xmm0,%xmm10
10178
10179# qhasm: xmm0 ^= xmm3
10180# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
10181# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
10182pxor %xmm3,%xmm0
10183
10184# qhasm: xmm0 &= xmm9
10185# asm 1: pand <xmm9=int6464#13,<xmm0=int6464#1
10186# asm 2: pand <xmm9=%xmm12,<xmm0=%xmm0
10187pand %xmm12,%xmm0
10188
10189# qhasm: xmm3 &= xmm13
10190# asm 1: pand <xmm13=int6464#16,<xmm3=int6464#4
10191# asm 2: pand <xmm13=%xmm15,<xmm3=%xmm3
10192pand %xmm15,%xmm3
10193
10194# qhasm: xmm0 ^= xmm3
10195# asm 1: pxor <xmm3=int6464#4,<xmm0=int6464#1
10196# asm 2: pxor <xmm3=%xmm3,<xmm0=%xmm0
10197pxor %xmm3,%xmm0
10198
10199# qhasm: xmm3 ^= xmm10
10200# asm 1: pxor <xmm10=int6464#11,<xmm3=int6464#4
10201# asm 2: pxor <xmm10=%xmm10,<xmm3=%xmm3
10202pxor %xmm10,%xmm3
10203
10204# qhasm: xmm6 ^= xmm12
10205# asm 1: pxor <xmm12=int6464#9,<xmm6=int6464#7
10206# asm 2: pxor <xmm12=%xmm8,<xmm6=%xmm6
10207pxor %xmm8,%xmm6
10208
10209# qhasm: xmm0 ^= xmm12
10210# asm 1: pxor <xmm12=int6464#9,<xmm0=int6464#1
10211# asm 2: pxor <xmm12=%xmm8,<xmm0=%xmm0
10212pxor %xmm8,%xmm0
10213
10214# qhasm: xmm5 ^= xmm8
10215# asm 1: pxor <xmm8=int6464#10,<xmm5=int6464#6
10216# asm 2: pxor <xmm8=%xmm9,<xmm5=%xmm5
10217pxor %xmm9,%xmm5
10218
10219# qhasm: xmm3 ^= xmm8
10220# asm 1: pxor <xmm8=int6464#10,<xmm3=int6464#4
10221# asm 2: pxor <xmm8=%xmm9,<xmm3=%xmm3
10222pxor %xmm9,%xmm3
10223
10224# qhasm: xmm12 = xmm7
10225# asm 1: movdqa <xmm7=int6464#8,>xmm12=int6464#9
10226# asm 2: movdqa <xmm7=%xmm7,>xmm12=%xmm8
10227movdqa %xmm7,%xmm8
10228
10229# qhasm: xmm8 = xmm1
10230# asm 1: movdqa <xmm1=int6464#2,>xmm8=int6464#10
10231# asm 2: movdqa <xmm1=%xmm1,>xmm8=%xmm9
10232movdqa %xmm1,%xmm9
10233
10234# qhasm: xmm12 ^= xmm4
10235# asm 1: pxor <xmm4=int6464#5,<xmm12=int6464#9
10236# asm 2: pxor <xmm4=%xmm4,<xmm12=%xmm8
10237pxor %xmm4,%xmm8
10238
10239# qhasm: xmm8 ^= xmm2
10240# asm 1: pxor <xmm2=int6464#3,<xmm8=int6464#10
10241# asm 2: pxor <xmm2=%xmm2,<xmm8=%xmm9
10242pxor %xmm2,%xmm9
10243
10244# qhasm: xmm11 = xmm15
10245# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10246# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10247movdqa %xmm13,%xmm10
10248
10249# qhasm: xmm11 ^= xmm14
10250# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10251# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10252pxor %xmm11,%xmm10
10253
10254# qhasm: xmm11 &= xmm12
10255# asm 1: pand <xmm12=int6464#9,<xmm11=int6464#11
10256# asm 2: pand <xmm12=%xmm8,<xmm11=%xmm10
10257pand %xmm8,%xmm10
10258
10259# qhasm: xmm12 ^= xmm8
10260# asm 1: pxor <xmm8=int6464#10,<xmm12=int6464#9
10261# asm 2: pxor <xmm8=%xmm9,<xmm12=%xmm8
10262pxor %xmm9,%xmm8
10263
10264# qhasm: xmm12 &= xmm14
10265# asm 1: pand <xmm14=int6464#12,<xmm12=int6464#9
10266# asm 2: pand <xmm14=%xmm11,<xmm12=%xmm8
10267pand %xmm11,%xmm8
10268
10269# qhasm: xmm8 &= xmm15
10270# asm 1: pand <xmm15=int6464#14,<xmm8=int6464#10
10271# asm 2: pand <xmm15=%xmm13,<xmm8=%xmm9
10272pand %xmm13,%xmm9
10273
10274# qhasm: xmm8 ^= xmm12
10275# asm 1: pxor <xmm12=int6464#9,<xmm8=int6464#10
10276# asm 2: pxor <xmm12=%xmm8,<xmm8=%xmm9
10277pxor %xmm8,%xmm9
10278
10279# qhasm: xmm12 ^= xmm11
10280# asm 1: pxor <xmm11=int6464#11,<xmm12=int6464#9
10281# asm 2: pxor <xmm11=%xmm10,<xmm12=%xmm8
10282pxor %xmm10,%xmm8
10283
10284# qhasm: xmm10 = xmm13
10285# asm 1: movdqa <xmm13=int6464#16,>xmm10=int6464#11
10286# asm 2: movdqa <xmm13=%xmm15,>xmm10=%xmm10
10287movdqa %xmm15,%xmm10
10288
10289# qhasm: xmm10 ^= xmm9
10290# asm 1: pxor <xmm9=int6464#13,<xmm10=int6464#11
10291# asm 2: pxor <xmm9=%xmm12,<xmm10=%xmm10
10292pxor %xmm12,%xmm10
10293
10294# qhasm: xmm10 &= xmm4
10295# asm 1: pand <xmm4=int6464#5,<xmm10=int6464#11
10296# asm 2: pand <xmm4=%xmm4,<xmm10=%xmm10
10297pand %xmm4,%xmm10
10298
10299# qhasm: xmm4 ^= xmm2
10300# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
10301# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
10302pxor %xmm2,%xmm4
10303
10304# qhasm: xmm4 &= xmm9
10305# asm 1: pand <xmm9=int6464#13,<xmm4=int6464#5
10306# asm 2: pand <xmm9=%xmm12,<xmm4=%xmm4
10307pand %xmm12,%xmm4
10308
10309# qhasm: xmm2 &= xmm13
10310# asm 1: pand <xmm13=int6464#16,<xmm2=int6464#3
10311# asm 2: pand <xmm13=%xmm15,<xmm2=%xmm2
10312pand %xmm15,%xmm2
10313
10314# qhasm: xmm4 ^= xmm2
10315# asm 1: pxor <xmm2=int6464#3,<xmm4=int6464#5
10316# asm 2: pxor <xmm2=%xmm2,<xmm4=%xmm4
10317pxor %xmm2,%xmm4
10318
10319# qhasm: xmm2 ^= xmm10
10320# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#3
10321# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm2
10322pxor %xmm10,%xmm2
10323
10324# qhasm: xmm15 ^= xmm13
10325# asm 1: pxor <xmm13=int6464#16,<xmm15=int6464#14
10326# asm 2: pxor <xmm13=%xmm15,<xmm15=%xmm13
10327pxor %xmm15,%xmm13
10328
10329# qhasm: xmm14 ^= xmm9
10330# asm 1: pxor <xmm9=int6464#13,<xmm14=int6464#12
10331# asm 2: pxor <xmm9=%xmm12,<xmm14=%xmm11
10332pxor %xmm12,%xmm11
10333
10334# qhasm: xmm11 = xmm15
10335# asm 1: movdqa <xmm15=int6464#14,>xmm11=int6464#11
10336# asm 2: movdqa <xmm15=%xmm13,>xmm11=%xmm10
10337movdqa %xmm13,%xmm10
10338
10339# qhasm: xmm11 ^= xmm14
10340# asm 1: pxor <xmm14=int6464#12,<xmm11=int6464#11
10341# asm 2: pxor <xmm14=%xmm11,<xmm11=%xmm10
10342pxor %xmm11,%xmm10
10343
10344# qhasm: xmm11 &= xmm7
10345# asm 1: pand <xmm7=int6464#8,<xmm11=int6464#11
10346# asm 2: pand <xmm7=%xmm7,<xmm11=%xmm10
10347pand %xmm7,%xmm10
10348
10349# qhasm: xmm7 ^= xmm1
10350# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
10351# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
10352pxor %xmm1,%xmm7
10353
10354# qhasm: xmm7 &= xmm14
10355# asm 1: pand <xmm14=int6464#12,<xmm7=int6464#8
10356# asm 2: pand <xmm14=%xmm11,<xmm7=%xmm7
10357pand %xmm11,%xmm7
10358
10359# qhasm: xmm1 &= xmm15
10360# asm 1: pand <xmm15=int6464#14,<xmm1=int6464#2
10361# asm 2: pand <xmm15=%xmm13,<xmm1=%xmm1
10362pand %xmm13,%xmm1
10363
10364# qhasm: xmm7 ^= xmm1
10365# asm 1: pxor <xmm1=int6464#2,<xmm7=int6464#8
10366# asm 2: pxor <xmm1=%xmm1,<xmm7=%xmm7
10367pxor %xmm1,%xmm7
10368
10369# qhasm: xmm1 ^= xmm11
10370# asm 1: pxor <xmm11=int6464#11,<xmm1=int6464#2
10371# asm 2: pxor <xmm11=%xmm10,<xmm1=%xmm1
10372pxor %xmm10,%xmm1
10373
10374# qhasm: xmm7 ^= xmm12
10375# asm 1: pxor <xmm12=int6464#9,<xmm7=int6464#8
10376# asm 2: pxor <xmm12=%xmm8,<xmm7=%xmm7
10377pxor %xmm8,%xmm7
10378
10379# qhasm: xmm4 ^= xmm12
10380# asm 1: pxor <xmm12=int6464#9,<xmm4=int6464#5
10381# asm 2: pxor <xmm12=%xmm8,<xmm4=%xmm4
10382pxor %xmm8,%xmm4
10383
10384# qhasm: xmm1 ^= xmm8
10385# asm 1: pxor <xmm8=int6464#10,<xmm1=int6464#2
10386# asm 2: pxor <xmm8=%xmm9,<xmm1=%xmm1
10387pxor %xmm9,%xmm1
10388
10389# qhasm: xmm2 ^= xmm8
10390# asm 1: pxor <xmm8=int6464#10,<xmm2=int6464#3
10391# asm 2: pxor <xmm8=%xmm9,<xmm2=%xmm2
10392pxor %xmm9,%xmm2
10393
10394# qhasm: xmm7 ^= xmm0
10395# asm 1: pxor <xmm0=int6464#1,<xmm7=int6464#8
10396# asm 2: pxor <xmm0=%xmm0,<xmm7=%xmm7
10397pxor %xmm0,%xmm7
10398
10399# qhasm: xmm1 ^= xmm6
10400# asm 1: pxor <xmm6=int6464#7,<xmm1=int6464#2
10401# asm 2: pxor <xmm6=%xmm6,<xmm1=%xmm1
10402pxor %xmm6,%xmm1
10403
10404# qhasm: xmm4 ^= xmm7
10405# asm 1: pxor <xmm7=int6464#8,<xmm4=int6464#5
10406# asm 2: pxor <xmm7=%xmm7,<xmm4=%xmm4
10407pxor %xmm7,%xmm4
10408
10409# qhasm: xmm6 ^= xmm0
10410# asm 1: pxor <xmm0=int6464#1,<xmm6=int6464#7
10411# asm 2: pxor <xmm0=%xmm0,<xmm6=%xmm6
10412pxor %xmm0,%xmm6
10413
10414# qhasm: xmm0 ^= xmm1
10415# asm 1: pxor <xmm1=int6464#2,<xmm0=int6464#1
10416# asm 2: pxor <xmm1=%xmm1,<xmm0=%xmm0
10417pxor %xmm1,%xmm0
10418
10419# qhasm: xmm1 ^= xmm5
10420# asm 1: pxor <xmm5=int6464#6,<xmm1=int6464#2
10421# asm 2: pxor <xmm5=%xmm5,<xmm1=%xmm1
10422pxor %xmm5,%xmm1
10423
10424# qhasm: xmm5 ^= xmm2
10425# asm 1: pxor <xmm2=int6464#3,<xmm5=int6464#6
10426# asm 2: pxor <xmm2=%xmm2,<xmm5=%xmm5
10427pxor %xmm2,%xmm5
10428
10429# qhasm: xmm4 ^= xmm5
10430# asm 1: pxor <xmm5=int6464#6,<xmm4=int6464#5
10431# asm 2: pxor <xmm5=%xmm5,<xmm4=%xmm4
10432pxor %xmm5,%xmm4
10433
10434# qhasm: xmm2 ^= xmm3
10435# asm 1: pxor <xmm3=int6464#4,<xmm2=int6464#3
10436# asm 2: pxor <xmm3=%xmm3,<xmm2=%xmm2
10437pxor %xmm3,%xmm2
10438
10439# qhasm: xmm3 ^= xmm5
10440# asm 1: pxor <xmm5=int6464#6,<xmm3=int6464#4
10441# asm 2: pxor <xmm5=%xmm5,<xmm3=%xmm3
10442pxor %xmm5,%xmm3
10443
10444# qhasm: xmm6 ^= xmm3
10445# asm 1: pxor <xmm3=int6464#4,<xmm6=int6464#7
10446# asm 2: pxor <xmm3=%xmm3,<xmm6=%xmm6
10447pxor %xmm3,%xmm6
10448
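# MixColumns for this round, same pattern as before: pshufd $0x93 word
# rotations and XOR accumulation, then pshufd $0x4E half swaps.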
10449# qhasm: xmm8 = shuffle dwords of xmm0 by 0x93
10450# asm 1: pshufd $0x93,<xmm0=int6464#1,>xmm8=int6464#9
10451# asm 2: pshufd $0x93,<xmm0=%xmm0,>xmm8=%xmm8
10452pshufd $0x93,%xmm0,%xmm8
10453
10454# qhasm: xmm9 = shuffle dwords of xmm1 by 0x93
10455# asm 1: pshufd $0x93,<xmm1=int6464#2,>xmm9=int6464#10
10456# asm 2: pshufd $0x93,<xmm1=%xmm1,>xmm9=%xmm9
10457pshufd $0x93,%xmm1,%xmm9
10458
10459# qhasm: xmm10 = shuffle dwords of xmm4 by 0x93
10460# asm 1: pshufd $0x93,<xmm4=int6464#5,>xmm10=int6464#11
10461# asm 2: pshufd $0x93,<xmm4=%xmm4,>xmm10=%xmm10
10462pshufd $0x93,%xmm4,%xmm10
10463
10464# qhasm: xmm11 = shuffle dwords of xmm6 by 0x93
10465# asm 1: pshufd $0x93,<xmm6=int6464#7,>xmm11=int6464#12
10466# asm 2: pshufd $0x93,<xmm6=%xmm6,>xmm11=%xmm11
10467pshufd $0x93,%xmm6,%xmm11
10468
10469# qhasm: xmm12 = shuffle dwords of xmm3 by 0x93
10470# asm 1: pshufd $0x93,<xmm3=int6464#4,>xmm12=int6464#13
10471# asm 2: pshufd $0x93,<xmm3=%xmm3,>xmm12=%xmm12
10472pshufd $0x93,%xmm3,%xmm12
10473
10474# qhasm: xmm13 = shuffle dwords of xmm7 by 0x93
10475# asm 1: pshufd $0x93,<xmm7=int6464#8,>xmm13=int6464#14
10476# asm 2: pshufd $0x93,<xmm7=%xmm7,>xmm13=%xmm13
10477pshufd $0x93,%xmm7,%xmm13
10478
10479# qhasm: xmm14 = shuffle dwords of xmm2 by 0x93
10480# asm 1: pshufd $0x93,<xmm2=int6464#3,>xmm14=int6464#15
10481# asm 2: pshufd $0x93,<xmm2=%xmm2,>xmm14=%xmm14
10482pshufd $0x93,%xmm2,%xmm14
10483
10484# qhasm: xmm15 = shuffle dwords of xmm5 by 0x93
10485# asm 1: pshufd $0x93,<xmm5=int6464#6,>xmm15=int6464#16
10486# asm 2: pshufd $0x93,<xmm5=%xmm5,>xmm15=%xmm15
10487pshufd $0x93,%xmm5,%xmm15
10488
10489# qhasm: xmm0 ^= xmm8
10490# asm 1: pxor <xmm8=int6464#9,<xmm0=int6464#1
10491# asm 2: pxor <xmm8=%xmm8,<xmm0=%xmm0
10492pxor %xmm8,%xmm0
10493
10494# qhasm: xmm1 ^= xmm9
10495# asm 1: pxor <xmm9=int6464#10,<xmm1=int6464#2
10496# asm 2: pxor <xmm9=%xmm9,<xmm1=%xmm1
10497pxor %xmm9,%xmm1
10498
10499# qhasm: xmm4 ^= xmm10
10500# asm 1: pxor <xmm10=int6464#11,<xmm4=int6464#5
10501# asm 2: pxor <xmm10=%xmm10,<xmm4=%xmm4
10502pxor %xmm10,%xmm4
10503
10504# qhasm: xmm6 ^= xmm11
10505# asm 1: pxor <xmm11=int6464#12,<xmm6=int6464#7
10506# asm 2: pxor <xmm11=%xmm11,<xmm6=%xmm6
10507pxor %xmm11,%xmm6
10508
10509# qhasm: xmm3 ^= xmm12
10510# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#4
10511# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm3
10512pxor %xmm12,%xmm3
10513
10514# qhasm: xmm7 ^= xmm13
10515# asm 1: pxor <xmm13=int6464#14,<xmm7=int6464#8
10516# asm 2: pxor <xmm13=%xmm13,<xmm7=%xmm7
10517pxor %xmm13,%xmm7
10518
10519# qhasm: xmm2 ^= xmm14
10520# asm 1: pxor <xmm14=int6464#15,<xmm2=int6464#3
10521# asm 2: pxor <xmm14=%xmm14,<xmm2=%xmm2
10522pxor %xmm14,%xmm2
10523
10524# qhasm: xmm5 ^= xmm15
10525# asm 1: pxor <xmm15=int6464#16,<xmm5=int6464#6
10526# asm 2: pxor <xmm15=%xmm15,<xmm5=%xmm5
10527pxor %xmm15,%xmm5
10528
10529# qhasm: xmm8 ^= xmm5
10530# asm 1: pxor <xmm5=int6464#6,<xmm8=int6464#9
10531# asm 2: pxor <xmm5=%xmm5,<xmm8=%xmm8
10532pxor %xmm5,%xmm8
10533
10534# qhasm: xmm9 ^= xmm0
10535# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
10536# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
10537pxor %xmm0,%xmm9
10538
10539# qhasm: xmm10 ^= xmm1
10540# asm 1: pxor <xmm1=int6464#2,<xmm10=int6464#11
10541# asm 2: pxor <xmm1=%xmm1,<xmm10=%xmm10
10542pxor %xmm1,%xmm10
10543
10544# qhasm: xmm9 ^= xmm5
10545# asm 1: pxor <xmm5=int6464#6,<xmm9=int6464#10
10546# asm 2: pxor <xmm5=%xmm5,<xmm9=%xmm9
10547pxor %xmm5,%xmm9
10548
10549# qhasm: xmm11 ^= xmm4
10550# asm 1: pxor <xmm4=int6464#5,<xmm11=int6464#12
10551# asm 2: pxor <xmm4=%xmm4,<xmm11=%xmm11
10552pxor %xmm4,%xmm11
10553
10554# qhasm: xmm12 ^= xmm6
10555# asm 1: pxor <xmm6=int6464#7,<xmm12=int6464#13
10556# asm 2: pxor <xmm6=%xmm6,<xmm12=%xmm12
10557pxor %xmm6,%xmm12
10558
10559# qhasm: xmm13 ^= xmm3
10560# asm 1: pxor <xmm3=int6464#4,<xmm13=int6464#14
10561# asm 2: pxor <xmm3=%xmm3,<xmm13=%xmm13
10562pxor %xmm3,%xmm13
10563
10564# qhasm: xmm11 ^= xmm5
10565# asm 1: pxor <xmm5=int6464#6,<xmm11=int6464#12
10566# asm 2: pxor <xmm5=%xmm5,<xmm11=%xmm11
10567pxor %xmm5,%xmm11
10568
10569# qhasm: xmm14 ^= xmm7
10570# asm 1: pxor <xmm7=int6464#8,<xmm14=int6464#15
10571# asm 2: pxor <xmm7=%xmm7,<xmm14=%xmm14
10572pxor %xmm7,%xmm14
10573
10574# qhasm: xmm15 ^= xmm2
10575# asm 1: pxor <xmm2=int6464#3,<xmm15=int6464#16
10576# asm 2: pxor <xmm2=%xmm2,<xmm15=%xmm15
10577pxor %xmm2,%xmm15
10578
10579# qhasm: xmm12 ^= xmm5
10580# asm 1: pxor <xmm5=int6464#6,<xmm12=int6464#13
10581# asm 2: pxor <xmm5=%xmm5,<xmm12=%xmm12
10582pxor %xmm5,%xmm12
10583
10584# qhasm: xmm0 = shuffle dwords of xmm0 by 0x4E
10585# asm 1: pshufd $0x4E,<xmm0=int6464#1,>xmm0=int6464#1
10586# asm 2: pshufd $0x4E,<xmm0=%xmm0,>xmm0=%xmm0
10587pshufd $0x4E,%xmm0,%xmm0
10588
10589# qhasm: xmm1 = shuffle dwords of xmm1 by 0x4E
10590# asm 1: pshufd $0x4E,<xmm1=int6464#2,>xmm1=int6464#2
10591# asm 2: pshufd $0x4E,<xmm1=%xmm1,>xmm1=%xmm1
10592pshufd $0x4E,%xmm1,%xmm1
10593
10594# qhasm: xmm4 = shuffle dwords of xmm4 by 0x4E
10595# asm 1: pshufd $0x4E,<xmm4=int6464#5,>xmm4=int6464#5
10596# asm 2: pshufd $0x4E,<xmm4=%xmm4,>xmm4=%xmm4
10597pshufd $0x4E,%xmm4,%xmm4
10598
10599# qhasm: xmm6 = shuffle dwords of xmm6 by 0x4E
10600# asm 1: pshufd $0x4E,<xmm6=int6464#7,>xmm6=int6464#7
10601# asm 2: pshufd $0x4E,<xmm6=%xmm6,>xmm6=%xmm6
10602pshufd $0x4E,%xmm6,%xmm6
10603
10604# qhasm: xmm3 = shuffle dwords of xmm3 by 0x4E
10605# asm 1: pshufd $0x4E,<xmm3=int6464#4,>xmm3=int6464#4
10606# asm 2: pshufd $0x4E,<xmm3=%xmm3,>xmm3=%xmm3
10607pshufd $0x4E,%xmm3,%xmm3
10608
10609# qhasm: xmm7 = shuffle dwords of xmm7 by 0x4E
10610# asm 1: pshufd $0x4E,<xmm7=int6464#8,>xmm7=int6464#8
10611# asm 2: pshufd $0x4E,<xmm7=%xmm7,>xmm7=%xmm7
10612pshufd $0x4E,%xmm7,%xmm7
10613
10614# qhasm: xmm2 = shuffle dwords of xmm2 by 0x4E
10615# asm 1: pshufd $0x4E,<xmm2=int6464#3,>xmm2=int6464#3
10616# asm 2: pshufd $0x4E,<xmm2=%xmm2,>xmm2=%xmm2
10617pshufd $0x4E,%xmm2,%xmm2
10618
10619# qhasm: xmm5 = shuffle dwords of xmm5 by 0x4E
10620# asm 1: pshufd $0x4E,<xmm5=int6464#6,>xmm5=int6464#6
10621# asm 2: pshufd $0x4E,<xmm5=%xmm5,>xmm5=%xmm5
10622pshufd $0x4E,%xmm5,%xmm5
10623
10624# qhasm: xmm8 ^= xmm0
10625# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
10626# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
10627pxor %xmm0,%xmm8
10628
10629# qhasm: xmm9 ^= xmm1
10630# asm 1: pxor <xmm1=int6464#2,<xmm9=int6464#10
10631# asm 2: pxor <xmm1=%xmm1,<xmm9=%xmm9
10632pxor %xmm1,%xmm9
10633
10634# qhasm: xmm10 ^= xmm4
10635# asm 1: pxor <xmm4=int6464#5,<xmm10=int6464#11
10636# asm 2: pxor <xmm4=%xmm4,<xmm10=%xmm10
10637pxor %xmm4,%xmm10
10638
10639# qhasm: xmm11 ^= xmm6
10640# asm 1: pxor <xmm6=int6464#7,<xmm11=int6464#12
10641# asm 2: pxor <xmm6=%xmm6,<xmm11=%xmm11
10642pxor %xmm6,%xmm11
10643
10644# qhasm: xmm12 ^= xmm3
10645# asm 1: pxor <xmm3=int6464#4,<xmm12=int6464#13
10646# asm 2: pxor <xmm3=%xmm3,<xmm12=%xmm12
10647pxor %xmm3,%xmm12
10648
10649# qhasm: xmm13 ^= xmm7
10650# asm 1: pxor <xmm7=int6464#8,<xmm13=int6464#14
10651# asm 2: pxor <xmm7=%xmm7,<xmm13=%xmm13
10652pxor %xmm7,%xmm13
10653
10654# qhasm: xmm14 ^= xmm2
10655# asm 1: pxor <xmm2=int6464#3,<xmm14=int6464#15
10656# asm 2: pxor <xmm2=%xmm2,<xmm14=%xmm14
10657pxor %xmm2,%xmm14
10658
10659# qhasm: xmm15 ^= xmm5
10660# asm 1: pxor <xmm5=int6464#6,<xmm15=int6464#16
10661# asm 2: pxor <xmm5=%xmm5,<xmm15=%xmm15
10662pxor %xmm5,%xmm15
10663
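# AddRoundKey with what appears to be round key 9 (offsets 1152..1264 at
# the same 128-byte stride, 9 * 128 = 1152). The pshufb here uses SRM0
# rather than SR, suggesting the ShiftRows permutation is merged with the
# M0 bit-ordering transform used when leaving the bitsliced domain.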
10664# qhasm: xmm8 ^= *(int128 *)(c + 1152)
10665# asm 1: pxor 1152(<c=int64#5),<xmm8=int6464#9
10666# asm 2: pxor 1152(<c=%r8),<xmm8=%xmm8
10667pxor 1152(%r8),%xmm8
10668
10669# qhasm: shuffle bytes of xmm8 by SRM0
10670# asm 1: pshufb SRM0,<xmm8=int6464#9
10671# asm 2: pshufb SRM0,<xmm8=%xmm8
10672pshufb SRM0,%xmm8
10673
10674# qhasm: xmm9 ^= *(int128 *)(c + 1168)
10675# asm 1: pxor 1168(<c=int64#5),<xmm9=int6464#10
10676# asm 2: pxor 1168(<c=%r8),<xmm9=%xmm9
10677pxor 1168(%r8),%xmm9
10678
10679# qhasm: shuffle bytes of xmm9 by SRM0
10680# asm 1: pshufb SRM0,<xmm9=int6464#10
10681# asm 2: pshufb SRM0,<xmm9=%xmm9
10682pshufb SRM0,%xmm9
10683
10684# qhasm: xmm10 ^= *(int128 *)(c + 1184)
10685# asm 1: pxor 1184(<c=int64#5),<xmm10=int6464#11
10686# asm 2: pxor 1184(<c=%r8),<xmm10=%xmm10
10687pxor 1184(%r8),%xmm10
10688
10689# qhasm: shuffle bytes of xmm10 by SRM0
10690# asm 1: pshufb SRM0,<xmm10=int6464#11
10691# asm 2: pshufb SRM0,<xmm10=%xmm10
10692pshufb SRM0,%xmm10
10693
10694# qhasm: xmm11 ^= *(int128 *)(c + 1200)
10695# asm 1: pxor 1200(<c=int64#5),<xmm11=int6464#12
10696# asm 2: pxor 1200(<c=%r8),<xmm11=%xmm11
10697pxor 1200(%r8),%xmm11
10698
10699# qhasm: shuffle bytes of xmm11 by SRM0
10700# asm 1: pshufb SRM0,<xmm11=int6464#12
10701# asm 2: pshufb SRM0,<xmm11=%xmm11
10702pshufb SRM0,%xmm11
10703
10704# qhasm: xmm12 ^= *(int128 *)(c + 1216)
10705# asm 1: pxor 1216(<c=int64#5),<xmm12=int6464#13
10706# asm 2: pxor 1216(<c=%r8),<xmm12=%xmm12
10707pxor 1216(%r8),%xmm12
10708
10709# qhasm: shuffle bytes of xmm12 by SRM0
10710# asm 1: pshufb SRM0,<xmm12=int6464#13
10711# asm 2: pshufb SRM0,<xmm12=%xmm12
10712pshufb SRM0,%xmm12
10713
10714# qhasm: xmm13 ^= *(int128 *)(c + 1232)
10715# asm 1: pxor 1232(<c=int64#5),<xmm13=int6464#14
10716# asm 2: pxor 1232(<c=%r8),<xmm13=%xmm13
10717pxor 1232(%r8),%xmm13
10718
10719# qhasm: shuffle bytes of xmm13 by SRM0
10720# asm 1: pshufb SRM0,<xmm13=int6464#14
10721# asm 2: pshufb SRM0,<xmm13=%xmm13
10722pshufb SRM0,%xmm13
10723
10724# qhasm: xmm14 ^= *(int128 *)(c + 1248)
10725# asm 1: pxor 1248(<c=int64#5),<xmm14=int6464#15
10726# asm 2: pxor 1248(<c=%r8),<xmm14=%xmm14
10727pxor 1248(%r8),%xmm14
10728
10729# qhasm: shuffle bytes of xmm14 by SRM0
10730# asm 1: pshufb SRM0,<xmm14=int6464#15
10731# asm 2: pshufb SRM0,<xmm14=%xmm14
10732pshufb SRM0,%xmm14
10733
10734# qhasm: xmm15 ^= *(int128 *)(c + 1264)
10735# asm 1: pxor 1264(<c=int64#5),<xmm15=int6464#16
10736# asm 2: pxor 1264(<c=%r8),<xmm15=%xmm15
10737pxor 1264(%r8),%xmm15
10738
10739# qhasm: shuffle bytes of xmm15 by SRM0
10740# asm 1: pshufb SRM0,<xmm15=int6464#16
10741# asm 2: pshufb SRM0,<xmm15=%xmm15
10742pshufb SRM0,%xmm15
10743
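# One more pass of the bitsliced S-box: an input linear layer of plain
# pxor instructions, followed by the nonlinear inversion core below.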
10744# qhasm: xmm13 ^= xmm14
10745# asm 1: pxor <xmm14=int6464#15,<xmm13=int6464#14
10746# asm 2: pxor <xmm14=%xmm14,<xmm13=%xmm13
10747pxor %xmm14,%xmm13
10748
10749# qhasm: xmm10 ^= xmm9
10750# asm 1: pxor <xmm9=int6464#10,<xmm10=int6464#11
10751# asm 2: pxor <xmm9=%xmm9,<xmm10=%xmm10
10752pxor %xmm9,%xmm10
10753
10754# qhasm: xmm13 ^= xmm8
10755# asm 1: pxor <xmm8=int6464#9,<xmm13=int6464#14
10756# asm 2: pxor <xmm8=%xmm8,<xmm13=%xmm13
10757pxor %xmm8,%xmm13
10758
10759# qhasm: xmm14 ^= xmm10
10760# asm 1: pxor <xmm10=int6464#11,<xmm14=int6464#15
10761# asm 2: pxor <xmm10=%xmm10,<xmm14=%xmm14
10762pxor %xmm10,%xmm14
10763
10764# qhasm: xmm11 ^= xmm8
10765# asm 1: pxor <xmm8=int6464#9,<xmm11=int6464#12
10766# asm 2: pxor <xmm8=%xmm8,<xmm11=%xmm11
10767pxor %xmm8,%xmm11
10768
10769# qhasm: xmm14 ^= xmm11
10770# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
10771# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
10772pxor %xmm11,%xmm14
10773
10774# qhasm: xmm11 ^= xmm15
10775# asm 1: pxor <xmm15=int6464#16,<xmm11=int6464#12
10776# asm 2: pxor <xmm15=%xmm15,<xmm11=%xmm11
10777pxor %xmm15,%xmm11
10778
10779# qhasm: xmm11 ^= xmm12
10780# asm 1: pxor <xmm12=int6464#13,<xmm11=int6464#12
10781# asm 2: pxor <xmm12=%xmm12,<xmm11=%xmm11
10782pxor %xmm12,%xmm11
10783
10784# qhasm: xmm15 ^= xmm13
10785# asm 1: pxor <xmm13=int6464#14,<xmm15=int6464#16
10786# asm 2: pxor <xmm13=%xmm13,<xmm15=%xmm15
10787pxor %xmm13,%xmm15
10788
10789# qhasm: xmm11 ^= xmm9
10790# asm 1: pxor <xmm9=int6464#10,<xmm11=int6464#12
10791# asm 2: pxor <xmm9=%xmm9,<xmm11=%xmm11
10792pxor %xmm9,%xmm11
10793
10794# qhasm: xmm12 ^= xmm13
10795# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
10796# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
10797pxor %xmm13,%xmm12
10798
10799# qhasm: xmm10 ^= xmm15
10800# asm 1: pxor <xmm15=int6464#16,<xmm10=int6464#11
10801# asm 2: pxor <xmm15=%xmm15,<xmm10=%xmm10
10802pxor %xmm15,%xmm10
10803
10804# qhasm: xmm9 ^= xmm13
10805# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
10806# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
10807pxor %xmm13,%xmm9
10808
10809# qhasm: xmm3 = xmm15
10810# asm 1: movdqa <xmm15=int6464#16,>xmm3=int6464#1
10811# asm 2: movdqa <xmm15=%xmm15,>xmm3=%xmm0
10812movdqa %xmm15,%xmm0
10813
10814# qhasm: xmm2 = xmm9
10815# asm 1: movdqa <xmm9=int6464#10,>xmm2=int6464#2
10816# asm 2: movdqa <xmm9=%xmm9,>xmm2=%xmm1
10817movdqa %xmm9,%xmm1
10818
10819# qhasm: xmm1 = xmm13
10820# asm 1: movdqa <xmm13=int6464#14,>xmm1=int6464#3
10821# asm 2: movdqa <xmm13=%xmm13,>xmm1=%xmm2
10822movdqa %xmm13,%xmm2
10823
10824# qhasm: xmm5 = xmm10
10825# asm 1: movdqa <xmm10=int6464#11,>xmm5=int6464#4
10826# asm 2: movdqa <xmm10=%xmm10,>xmm5=%xmm3
10827movdqa %xmm10,%xmm3
10828
10829# qhasm: xmm4 = xmm14
10830# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#5
10831# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm4
10832movdqa %xmm14,%xmm4
10833
10834# qhasm: xmm3 ^= xmm12
10835# asm 1: pxor <xmm12=int6464#13,<xmm3=int6464#1
10836# asm 2: pxor <xmm12=%xmm12,<xmm3=%xmm0
10837pxor %xmm12,%xmm0
10838
10839# qhasm: xmm2 ^= xmm10
10840# asm 1: pxor <xmm10=int6464#11,<xmm2=int6464#2
10841# asm 2: pxor <xmm10=%xmm10,<xmm2=%xmm1
10842pxor %xmm10,%xmm1
10843
10844# qhasm: xmm1 ^= xmm11
10845# asm 1: pxor <xmm11=int6464#12,<xmm1=int6464#3
10846# asm 2: pxor <xmm11=%xmm11,<xmm1=%xmm2
10847pxor %xmm11,%xmm2
10848
10849# qhasm: xmm5 ^= xmm12
10850# asm 1: pxor <xmm12=int6464#13,<xmm5=int6464#4
10851# asm 2: pxor <xmm12=%xmm12,<xmm5=%xmm3
10852pxor %xmm12,%xmm3
10853
10854# qhasm: xmm4 ^= xmm8
10855# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#5
10856# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm4
10857pxor %xmm8,%xmm4
10858
10859# qhasm: xmm6 = xmm3
10860# asm 1: movdqa <xmm3=int6464#1,>xmm6=int6464#6
10861# asm 2: movdqa <xmm3=%xmm0,>xmm6=%xmm5
10862movdqa %xmm0,%xmm5
10863
10864# qhasm: xmm0 = xmm2
10865# asm 1: movdqa <xmm2=int6464#2,>xmm0=int6464#7
10866# asm 2: movdqa <xmm2=%xmm1,>xmm0=%xmm6
10867movdqa %xmm1,%xmm6
10868
10869# qhasm: xmm7 = xmm3
10870# asm 1: movdqa <xmm3=int6464#1,>xmm7=int6464#8
10871# asm 2: movdqa <xmm3=%xmm0,>xmm7=%xmm7
10872movdqa %xmm0,%xmm7
10873
10874# qhasm: xmm2 |= xmm1
10875# asm 1: por <xmm1=int6464#3,<xmm2=int6464#2
10876# asm 2: por <xmm1=%xmm2,<xmm2=%xmm1
10877por %xmm2,%xmm1
10878
10879# qhasm: xmm3 |= xmm4
10880# asm 1: por <xmm4=int6464#5,<xmm3=int6464#1
10881# asm 2: por <xmm4=%xmm4,<xmm3=%xmm0
10882por %xmm4,%xmm0
10883
10884# qhasm: xmm7 ^= xmm0
10885# asm 1: pxor <xmm0=int6464#7,<xmm7=int6464#8
10886# asm 2: pxor <xmm0=%xmm6,<xmm7=%xmm7
10887pxor %xmm6,%xmm7
10888
10889# qhasm: xmm6 &= xmm4
10890# asm 1: pand <xmm4=int6464#5,<xmm6=int6464#6
10891# asm 2: pand <xmm4=%xmm4,<xmm6=%xmm5
10892pand %xmm4,%xmm5
10893
10894# qhasm: xmm0 &= xmm1
10895# asm 1: pand <xmm1=int6464#3,<xmm0=int6464#7
10896# asm 2: pand <xmm1=%xmm2,<xmm0=%xmm6
10897pand %xmm2,%xmm6
10898
10899# qhasm: xmm4 ^= xmm1
10900# asm 1: pxor <xmm1=int6464#3,<xmm4=int6464#5
10901# asm 2: pxor <xmm1=%xmm2,<xmm4=%xmm4
10902pxor %xmm2,%xmm4
10903
10904# qhasm: xmm7 &= xmm4
10905# asm 1: pand <xmm4=int6464#5,<xmm7=int6464#8
10906# asm 2: pand <xmm4=%xmm4,<xmm7=%xmm7
10907pand %xmm4,%xmm7
10908
10909# qhasm: xmm4 = xmm11
10910# asm 1: movdqa <xmm11=int6464#12,>xmm4=int6464#3
10911# asm 2: movdqa <xmm11=%xmm11,>xmm4=%xmm2
10912movdqa %xmm11,%xmm2
10913
10914# qhasm: xmm4 ^= xmm8
10915# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#3
10916# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm2
10917pxor %xmm8,%xmm2
10918
10919# qhasm: xmm5 &= xmm4
10920# asm 1: pand <xmm4=int6464#3,<xmm5=int6464#4
10921# asm 2: pand <xmm4=%xmm2,<xmm5=%xmm3
10922pand %xmm2,%xmm3
10923
10924# qhasm: xmm3 ^= xmm5
10925# asm 1: pxor <xmm5=int6464#4,<xmm3=int6464#1
10926# asm 2: pxor <xmm5=%xmm3,<xmm3=%xmm0
10927pxor %xmm3,%xmm0
10928
10929# qhasm: xmm2 ^= xmm5
10930# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
10931# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
10932pxor %xmm3,%xmm1
10933
10934# qhasm: xmm5 = xmm15
10935# asm 1: movdqa <xmm15=int6464#16,>xmm5=int6464#3
10936# asm 2: movdqa <xmm15=%xmm15,>xmm5=%xmm2
10937movdqa %xmm15,%xmm2
10938
10939# qhasm: xmm5 ^= xmm9
10940# asm 1: pxor <xmm9=int6464#10,<xmm5=int6464#3
10941# asm 2: pxor <xmm9=%xmm9,<xmm5=%xmm2
10942pxor %xmm9,%xmm2
10943
10944# qhasm: xmm4 = xmm13
10945# asm 1: movdqa <xmm13=int6464#14,>xmm4=int6464#4
10946# asm 2: movdqa <xmm13=%xmm13,>xmm4=%xmm3
10947movdqa %xmm13,%xmm3
10948
10949# qhasm: xmm1 = xmm5
10950# asm 1: movdqa <xmm5=int6464#3,>xmm1=int6464#5
10951# asm 2: movdqa <xmm5=%xmm2,>xmm1=%xmm4
10952movdqa %xmm2,%xmm4
10953
10954# qhasm: xmm4 ^= xmm14
10955# asm 1: pxor <xmm14=int6464#15,<xmm4=int6464#4
10956# asm 2: pxor <xmm14=%xmm14,<xmm4=%xmm3
10957pxor %xmm14,%xmm3
10958
10959# qhasm: xmm1 |= xmm4
10960# asm 1: por <xmm4=int6464#4,<xmm1=int6464#5
10961# asm 2: por <xmm4=%xmm3,<xmm1=%xmm4
10962por %xmm3,%xmm4
10963
10964# qhasm: xmm5 &= xmm4
10965# asm 1: pand <xmm4=int6464#4,<xmm5=int6464#3
10966# asm 2: pand <xmm4=%xmm3,<xmm5=%xmm2
10967pand %xmm3,%xmm2
10968
10969# qhasm: xmm0 ^= xmm5
10970# asm 1: pxor <xmm5=int6464#3,<xmm0=int6464#7
10971# asm 2: pxor <xmm5=%xmm2,<xmm0=%xmm6
10972pxor %xmm2,%xmm6
10973
10974# qhasm: xmm3 ^= xmm7
10975# asm 1: pxor <xmm7=int6464#8,<xmm3=int6464#1
10976# asm 2: pxor <xmm7=%xmm7,<xmm3=%xmm0
10977pxor %xmm7,%xmm0
10978
10979# qhasm: xmm2 ^= xmm6
10980# asm 1: pxor <xmm6=int6464#6,<xmm2=int6464#2
10981# asm 2: pxor <xmm6=%xmm5,<xmm2=%xmm1
10982pxor %xmm5,%xmm1
10983
10984# qhasm: xmm1 ^= xmm7
10985# asm 1: pxor <xmm7=int6464#8,<xmm1=int6464#5
10986# asm 2: pxor <xmm7=%xmm7,<xmm1=%xmm4
10987pxor %xmm7,%xmm4
10988
10989# qhasm: xmm0 ^= xmm6
10990# asm 1: pxor <xmm6=int6464#6,<xmm0=int6464#7
10991# asm 2: pxor <xmm6=%xmm5,<xmm0=%xmm6
10992pxor %xmm5,%xmm6
10993
10994# qhasm: xmm1 ^= xmm6
10995# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
10996# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
10997pxor %xmm5,%xmm4
10998
10999# qhasm: xmm4 = xmm10
11000# asm 1: movdqa <xmm10=int6464#11,>xmm4=int6464#3
11001# asm 2: movdqa <xmm10=%xmm10,>xmm4=%xmm2
11002movdqa %xmm10,%xmm2
11003
11004# qhasm: xmm5 = xmm12
11005# asm 1: movdqa <xmm12=int6464#13,>xmm5=int6464#4
11006# asm 2: movdqa <xmm12=%xmm12,>xmm5=%xmm3
11007movdqa %xmm12,%xmm3
11008
11009# qhasm: xmm6 = xmm9
11010# asm 1: movdqa <xmm9=int6464#10,>xmm6=int6464#6
11011# asm 2: movdqa <xmm9=%xmm9,>xmm6=%xmm5
11012movdqa %xmm9,%xmm5
11013
11014# qhasm: xmm7 = xmm15
11015# asm 1: movdqa <xmm15=int6464#16,>xmm7=int6464#8
11016# asm 2: movdqa <xmm15=%xmm15,>xmm7=%xmm7
11017movdqa %xmm15,%xmm7
11018
11019# qhasm: xmm4 &= xmm11
11020# asm 1: pand <xmm11=int6464#12,<xmm4=int6464#3
11021# asm 2: pand <xmm11=%xmm11,<xmm4=%xmm2
11022pand %xmm11,%xmm2
11023
11024# qhasm: xmm5 &= xmm8
11025# asm 1: pand <xmm8=int6464#9,<xmm5=int6464#4
11026# asm 2: pand <xmm8=%xmm8,<xmm5=%xmm3
11027pand %xmm8,%xmm3
11028
11029# qhasm: xmm6 &= xmm13
11030# asm 1: pand <xmm13=int6464#14,<xmm6=int6464#6
11031# asm 2: pand <xmm13=%xmm13,<xmm6=%xmm5
11032pand %xmm13,%xmm5
11033
11034# qhasm: xmm7 |= xmm14
11035# asm 1: por <xmm14=int6464#15,<xmm7=int6464#8
11036# asm 2: por <xmm14=%xmm14,<xmm7=%xmm7
11037por %xmm14,%xmm7
11038
11039# qhasm: xmm3 ^= xmm4
11040# asm 1: pxor <xmm4=int6464#3,<xmm3=int6464#1
11041# asm 2: pxor <xmm4=%xmm2,<xmm3=%xmm0
11042pxor %xmm2,%xmm0
11043
11044# qhasm: xmm2 ^= xmm5
11045# asm 1: pxor <xmm5=int6464#4,<xmm2=int6464#2
11046# asm 2: pxor <xmm5=%xmm3,<xmm2=%xmm1
11047pxor %xmm3,%xmm1
11048
11049# qhasm: xmm1 ^= xmm6
11050# asm 1: pxor <xmm6=int6464#6,<xmm1=int6464#5
11051# asm 2: pxor <xmm6=%xmm5,<xmm1=%xmm4
11052pxor %xmm5,%xmm4
11053
11054# qhasm: xmm0 ^= xmm7
11055# asm 1: pxor <xmm7=int6464#8,<xmm0=int6464#7
11056# asm 2: pxor <xmm7=%xmm7,<xmm0=%xmm6
11057pxor %xmm7,%xmm6
11058
11059# qhasm: xmm4 = xmm3
11060# asm 1: movdqa <xmm3=int6464#1,>xmm4=int6464#3
11061# asm 2: movdqa <xmm3=%xmm0,>xmm4=%xmm2
11062movdqa %xmm0,%xmm2
11063
11064# qhasm: xmm4 ^= xmm2
11065# asm 1: pxor <xmm2=int6464#2,<xmm4=int6464#3
11066# asm 2: pxor <xmm2=%xmm1,<xmm4=%xmm2
11067pxor %xmm1,%xmm2
11068
11069# qhasm: xmm3 &= xmm1
11070# asm 1: pand <xmm1=int6464#5,<xmm3=int6464#1
11071# asm 2: pand <xmm1=%xmm4,<xmm3=%xmm0
11072pand %xmm4,%xmm0
11073
# qhasm: xmm6 = xmm0
# asm 1: movdqa <xmm0=int6464#7,>xmm6=int6464#4
# asm 2: movdqa <xmm0=%xmm6,>xmm6=%xmm3
movdqa %xmm6,%xmm3

# qhasm: xmm6 ^= xmm3
# asm 1: pxor <xmm3=int6464#1,<xmm6=int6464#4
# asm 2: pxor <xmm3=%xmm0,<xmm6=%xmm3
pxor %xmm0,%xmm3

# qhasm: xmm7 = xmm4
# asm 1: movdqa <xmm4=int6464#3,>xmm7=int6464#6
# asm 2: movdqa <xmm4=%xmm2,>xmm7=%xmm5
movdqa %xmm2,%xmm5

# qhasm: xmm7 &= xmm6
# asm 1: pand <xmm6=int6464#4,<xmm7=int6464#6
# asm 2: pand <xmm6=%xmm3,<xmm7=%xmm5
pand %xmm3,%xmm5

# qhasm: xmm7 ^= xmm2
# asm 1: pxor <xmm2=int6464#2,<xmm7=int6464#6
# asm 2: pxor <xmm2=%xmm1,<xmm7=%xmm5
pxor %xmm1,%xmm5

# qhasm: xmm5 = xmm1
# asm 1: movdqa <xmm1=int6464#5,>xmm5=int6464#8
# asm 2: movdqa <xmm1=%xmm4,>xmm5=%xmm7
movdqa %xmm4,%xmm7

# qhasm: xmm5 ^= xmm0
# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
pxor %xmm6,%xmm7

# qhasm: xmm3 ^= xmm2
# asm 1: pxor <xmm2=int6464#2,<xmm3=int6464#1
# asm 2: pxor <xmm2=%xmm1,<xmm3=%xmm0
pxor %xmm1,%xmm0

# qhasm: xmm5 &= xmm3
# asm 1: pand <xmm3=int6464#1,<xmm5=int6464#8
# asm 2: pand <xmm3=%xmm0,<xmm5=%xmm7
pand %xmm0,%xmm7

# qhasm: xmm5 ^= xmm0
# asm 1: pxor <xmm0=int6464#7,<xmm5=int6464#8
# asm 2: pxor <xmm0=%xmm6,<xmm5=%xmm7
pxor %xmm6,%xmm7

# qhasm: xmm1 ^= xmm5
# asm 1: pxor <xmm5=int6464#8,<xmm1=int6464#5
# asm 2: pxor <xmm5=%xmm7,<xmm1=%xmm4
pxor %xmm7,%xmm4

# qhasm: xmm2 = xmm6
# asm 1: movdqa <xmm6=int6464#4,>xmm2=int6464#1
# asm 2: movdqa <xmm6=%xmm3,>xmm2=%xmm0
movdqa %xmm3,%xmm0

# qhasm: xmm2 ^= xmm5
# asm 1: pxor <xmm5=int6464#8,<xmm2=int6464#1
# asm 2: pxor <xmm5=%xmm7,<xmm2=%xmm0
pxor %xmm7,%xmm0

# qhasm: xmm2 &= xmm0
# asm 1: pand <xmm0=int6464#7,<xmm2=int6464#1
# asm 2: pand <xmm0=%xmm6,<xmm2=%xmm0
pand %xmm6,%xmm0

# qhasm: xmm1 ^= xmm2
# asm 1: pxor <xmm2=int6464#1,<xmm1=int6464#5
# asm 2: pxor <xmm2=%xmm0,<xmm1=%xmm4
pxor %xmm0,%xmm4

# qhasm: xmm6 ^= xmm2
# asm 1: pxor <xmm2=int6464#1,<xmm6=int6464#4
# asm 2: pxor <xmm2=%xmm0,<xmm6=%xmm3
pxor %xmm0,%xmm3

# qhasm: xmm6 &= xmm7
# asm 1: pand <xmm7=int6464#6,<xmm6=int6464#4
# asm 2: pand <xmm7=%xmm5,<xmm6=%xmm3
pand %xmm5,%xmm3

# qhasm: xmm6 ^= xmm4
# asm 1: pxor <xmm4=int6464#3,<xmm6=int6464#4
# asm 2: pxor <xmm4=%xmm2,<xmm6=%xmm3
pxor %xmm2,%xmm3

# qhasm: xmm4 = xmm14
# asm 1: movdqa <xmm14=int6464#15,>xmm4=int6464#1
# asm 2: movdqa <xmm14=%xmm14,>xmm4=%xmm0
movdqa %xmm14,%xmm0

# qhasm: xmm0 = xmm13
# asm 1: movdqa <xmm13=int6464#14,>xmm0=int6464#2
# asm 2: movdqa <xmm13=%xmm13,>xmm0=%xmm1
movdqa %xmm13,%xmm1

# qhasm: xmm2 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm2=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm2=%xmm2
movdqa %xmm5,%xmm2

# qhasm: xmm2 ^= xmm6
# asm 1: pxor <xmm6=int6464#4,<xmm2=int6464#3
# asm 2: pxor <xmm6=%xmm3,<xmm2=%xmm2
pxor %xmm3,%xmm2

# qhasm: xmm2 &= xmm14
# asm 1: pand <xmm14=int6464#15,<xmm2=int6464#3
# asm 2: pand <xmm14=%xmm14,<xmm2=%xmm2
pand %xmm14,%xmm2

# qhasm: xmm14 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
pxor %xmm13,%xmm14

# qhasm: xmm14 &= xmm6
# asm 1: pand <xmm6=int6464#4,<xmm14=int6464#15
# asm 2: pand <xmm6=%xmm3,<xmm14=%xmm14
pand %xmm3,%xmm14

# qhasm: xmm13 &= xmm7
# asm 1: pand <xmm7=int6464#6,<xmm13=int6464#14
# asm 2: pand <xmm7=%xmm5,<xmm13=%xmm13
pand %xmm5,%xmm13

# qhasm: xmm14 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm14=int6464#15
# asm 2: pxor <xmm13=%xmm13,<xmm14=%xmm14
pxor %xmm13,%xmm14

# qhasm: xmm13 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm13=int6464#14
# asm 2: pxor <xmm2=%xmm2,<xmm13=%xmm13
pxor %xmm2,%xmm13

# qhasm: xmm4 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm4=int6464#1
# asm 2: pxor <xmm8=%xmm8,<xmm4=%xmm0
pxor %xmm8,%xmm0

# qhasm: xmm0 ^= xmm11
# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#2
# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm1
pxor %xmm11,%xmm1

# qhasm: xmm7 ^= xmm5
# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
pxor %xmm7,%xmm5

# qhasm: xmm6 ^= xmm1
# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
pxor %xmm4,%xmm3

# qhasm: xmm3 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
movdqa %xmm5,%xmm2

# qhasm: xmm3 ^= xmm6
# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
pxor %xmm3,%xmm2

# qhasm: xmm3 &= xmm4
# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
pand %xmm0,%xmm2

# qhasm: xmm4 ^= xmm0
# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
pxor %xmm1,%xmm0

# qhasm: xmm4 &= xmm6
# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
pand %xmm3,%xmm0

# qhasm: xmm0 &= xmm7
# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
pand %xmm5,%xmm1

# qhasm: xmm0 ^= xmm4
# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
pxor %xmm0,%xmm1

# qhasm: xmm4 ^= xmm3
# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
pxor %xmm2,%xmm0

# qhasm: xmm2 = xmm5
# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
movdqa %xmm7,%xmm2

# qhasm: xmm2 ^= xmm1
# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
pxor %xmm4,%xmm2

# qhasm: xmm2 &= xmm8
# asm 1: pand <xmm8=int6464#9,<xmm2=int6464#3
# asm 2: pand <xmm8=%xmm8,<xmm2=%xmm2
pand %xmm8,%xmm2

# qhasm: xmm8 ^= xmm11
# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
pxor %xmm11,%xmm8

# qhasm: xmm8 &= xmm1
# asm 1: pand <xmm1=int6464#5,<xmm8=int6464#9
# asm 2: pand <xmm1=%xmm4,<xmm8=%xmm8
pand %xmm4,%xmm8

# qhasm: xmm11 &= xmm5
# asm 1: pand <xmm5=int6464#8,<xmm11=int6464#12
# asm 2: pand <xmm5=%xmm7,<xmm11=%xmm11
pand %xmm7,%xmm11

# qhasm: xmm8 ^= xmm11
# asm 1: pxor <xmm11=int6464#12,<xmm8=int6464#9
# asm 2: pxor <xmm11=%xmm11,<xmm8=%xmm8
pxor %xmm11,%xmm8

# qhasm: xmm11 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm11=int6464#12
# asm 2: pxor <xmm2=%xmm2,<xmm11=%xmm11
pxor %xmm2,%xmm11

# qhasm: xmm14 ^= xmm4
# asm 1: pxor <xmm4=int6464#1,<xmm14=int6464#15
# asm 2: pxor <xmm4=%xmm0,<xmm14=%xmm14
pxor %xmm0,%xmm14

# qhasm: xmm8 ^= xmm4
# asm 1: pxor <xmm4=int6464#1,<xmm8=int6464#9
# asm 2: pxor <xmm4=%xmm0,<xmm8=%xmm8
pxor %xmm0,%xmm8

# qhasm: xmm13 ^= xmm0
# asm 1: pxor <xmm0=int6464#2,<xmm13=int6464#14
# asm 2: pxor <xmm0=%xmm1,<xmm13=%xmm13
pxor %xmm1,%xmm13

# qhasm: xmm11 ^= xmm0
# asm 1: pxor <xmm0=int6464#2,<xmm11=int6464#12
# asm 2: pxor <xmm0=%xmm1,<xmm11=%xmm11
pxor %xmm1,%xmm11

# qhasm: xmm4 = xmm15
# asm 1: movdqa <xmm15=int6464#16,>xmm4=int6464#1
# asm 2: movdqa <xmm15=%xmm15,>xmm4=%xmm0
movdqa %xmm15,%xmm0

# qhasm: xmm0 = xmm9
# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#2
# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm1
movdqa %xmm9,%xmm1

# qhasm: xmm4 ^= xmm12
# asm 1: pxor <xmm12=int6464#13,<xmm4=int6464#1
# asm 2: pxor <xmm12=%xmm12,<xmm4=%xmm0
pxor %xmm12,%xmm0

# qhasm: xmm0 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#2
# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm1
pxor %xmm10,%xmm1

# qhasm: xmm3 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
movdqa %xmm5,%xmm2

# qhasm: xmm3 ^= xmm6
# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
pxor %xmm3,%xmm2

# qhasm: xmm3 &= xmm4
# asm 1: pand <xmm4=int6464#1,<xmm3=int6464#3
# asm 2: pand <xmm4=%xmm0,<xmm3=%xmm2
pand %xmm0,%xmm2

# qhasm: xmm4 ^= xmm0
# asm 1: pxor <xmm0=int6464#2,<xmm4=int6464#1
# asm 2: pxor <xmm0=%xmm1,<xmm4=%xmm0
pxor %xmm1,%xmm0

# qhasm: xmm4 &= xmm6
# asm 1: pand <xmm6=int6464#4,<xmm4=int6464#1
# asm 2: pand <xmm6=%xmm3,<xmm4=%xmm0
pand %xmm3,%xmm0

# qhasm: xmm0 &= xmm7
# asm 1: pand <xmm7=int6464#6,<xmm0=int6464#2
# asm 2: pand <xmm7=%xmm5,<xmm0=%xmm1
pand %xmm5,%xmm1

# qhasm: xmm0 ^= xmm4
# asm 1: pxor <xmm4=int6464#1,<xmm0=int6464#2
# asm 2: pxor <xmm4=%xmm0,<xmm0=%xmm1
pxor %xmm0,%xmm1

# qhasm: xmm4 ^= xmm3
# asm 1: pxor <xmm3=int6464#3,<xmm4=int6464#1
# asm 2: pxor <xmm3=%xmm2,<xmm4=%xmm0
pxor %xmm2,%xmm0

# qhasm: xmm2 = xmm5
# asm 1: movdqa <xmm5=int6464#8,>xmm2=int6464#3
# asm 2: movdqa <xmm5=%xmm7,>xmm2=%xmm2
movdqa %xmm7,%xmm2

# qhasm: xmm2 ^= xmm1
# asm 1: pxor <xmm1=int6464#5,<xmm2=int6464#3
# asm 2: pxor <xmm1=%xmm4,<xmm2=%xmm2
pxor %xmm4,%xmm2

# qhasm: xmm2 &= xmm12
# asm 1: pand <xmm12=int6464#13,<xmm2=int6464#3
# asm 2: pand <xmm12=%xmm12,<xmm2=%xmm2
pand %xmm12,%xmm2

# qhasm: xmm12 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
pxor %xmm10,%xmm12

# qhasm: xmm12 &= xmm1
# asm 1: pand <xmm1=int6464#5,<xmm12=int6464#13
# asm 2: pand <xmm1=%xmm4,<xmm12=%xmm12
pand %xmm4,%xmm12

# qhasm: xmm10 &= xmm5
# asm 1: pand <xmm5=int6464#8,<xmm10=int6464#11
# asm 2: pand <xmm5=%xmm7,<xmm10=%xmm10
pand %xmm7,%xmm10

# qhasm: xmm12 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm12=int6464#13
# asm 2: pxor <xmm10=%xmm10,<xmm12=%xmm12
pxor %xmm10,%xmm12

# qhasm: xmm10 ^= xmm2
# asm 1: pxor <xmm2=int6464#3,<xmm10=int6464#11
# asm 2: pxor <xmm2=%xmm2,<xmm10=%xmm10
pxor %xmm2,%xmm10

# qhasm: xmm7 ^= xmm5
# asm 1: pxor <xmm5=int6464#8,<xmm7=int6464#6
# asm 2: pxor <xmm5=%xmm7,<xmm7=%xmm5
pxor %xmm7,%xmm5

# qhasm: xmm6 ^= xmm1
# asm 1: pxor <xmm1=int6464#5,<xmm6=int6464#4
# asm 2: pxor <xmm1=%xmm4,<xmm6=%xmm3
pxor %xmm4,%xmm3

# qhasm: xmm3 = xmm7
# asm 1: movdqa <xmm7=int6464#6,>xmm3=int6464#3
# asm 2: movdqa <xmm7=%xmm5,>xmm3=%xmm2
movdqa %xmm5,%xmm2

# qhasm: xmm3 ^= xmm6
# asm 1: pxor <xmm6=int6464#4,<xmm3=int6464#3
# asm 2: pxor <xmm6=%xmm3,<xmm3=%xmm2
pxor %xmm3,%xmm2

# qhasm: xmm3 &= xmm15
# asm 1: pand <xmm15=int6464#16,<xmm3=int6464#3
# asm 2: pand <xmm15=%xmm15,<xmm3=%xmm2
pand %xmm15,%xmm2

# qhasm: xmm15 ^= xmm9
# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
pxor %xmm9,%xmm15

# qhasm: xmm15 &= xmm6
# asm 1: pand <xmm6=int6464#4,<xmm15=int6464#16
# asm 2: pand <xmm6=%xmm3,<xmm15=%xmm15
pand %xmm3,%xmm15

# qhasm: xmm9 &= xmm7
# asm 1: pand <xmm7=int6464#6,<xmm9=int6464#10
# asm 2: pand <xmm7=%xmm5,<xmm9=%xmm9
pand %xmm5,%xmm9

# qhasm: xmm15 ^= xmm9
# asm 1: pxor <xmm9=int6464#10,<xmm15=int6464#16
# asm 2: pxor <xmm9=%xmm9,<xmm15=%xmm15
pxor %xmm9,%xmm15

# qhasm: xmm9 ^= xmm3
# asm 1: pxor <xmm3=int6464#3,<xmm9=int6464#10
# asm 2: pxor <xmm3=%xmm2,<xmm9=%xmm9
pxor %xmm2,%xmm9

# qhasm: xmm15 ^= xmm4
# asm 1: pxor <xmm4=int6464#1,<xmm15=int6464#16
# asm 2: pxor <xmm4=%xmm0,<xmm15=%xmm15
pxor %xmm0,%xmm15

# qhasm: xmm12 ^= xmm4
# asm 1: pxor <xmm4=int6464#1,<xmm12=int6464#13
# asm 2: pxor <xmm4=%xmm0,<xmm12=%xmm12
pxor %xmm0,%xmm12

# qhasm: xmm9 ^= xmm0
# asm 1: pxor <xmm0=int6464#2,<xmm9=int6464#10
# asm 2: pxor <xmm0=%xmm1,<xmm9=%xmm9
pxor %xmm1,%xmm9

# qhasm: xmm10 ^= xmm0
# asm 1: pxor <xmm0=int6464#2,<xmm10=int6464#11
# asm 2: pxor <xmm0=%xmm1,<xmm10=%xmm10
pxor %xmm1,%xmm10

# qhasm: xmm15 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm15=int6464#16
# asm 2: pxor <xmm8=%xmm8,<xmm15=%xmm15
pxor %xmm8,%xmm15

# qhasm: xmm9 ^= xmm14
# asm 1: pxor <xmm14=int6464#15,<xmm9=int6464#10
# asm 2: pxor <xmm14=%xmm14,<xmm9=%xmm9
pxor %xmm14,%xmm9

# qhasm: xmm12 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm12=int6464#13
# asm 2: pxor <xmm15=%xmm15,<xmm12=%xmm12
pxor %xmm15,%xmm12

# qhasm: xmm14 ^= xmm8
# asm 1: pxor <xmm8=int6464#9,<xmm14=int6464#15
# asm 2: pxor <xmm8=%xmm8,<xmm14=%xmm14
pxor %xmm8,%xmm14

# qhasm: xmm8 ^= xmm9
# asm 1: pxor <xmm9=int6464#10,<xmm8=int6464#9
# asm 2: pxor <xmm9=%xmm9,<xmm8=%xmm8
pxor %xmm9,%xmm8

# qhasm: xmm9 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm9=int6464#10
# asm 2: pxor <xmm13=%xmm13,<xmm9=%xmm9
pxor %xmm13,%xmm9

# qhasm: xmm13 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm13=int6464#14
# asm 2: pxor <xmm10=%xmm10,<xmm13=%xmm13
pxor %xmm10,%xmm13

# qhasm: xmm12 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm12=int6464#13
# asm 2: pxor <xmm13=%xmm13,<xmm12=%xmm12
pxor %xmm13,%xmm12

# qhasm: xmm10 ^= xmm11
# asm 1: pxor <xmm11=int6464#12,<xmm10=int6464#11
# asm 2: pxor <xmm11=%xmm11,<xmm10=%xmm10
pxor %xmm11,%xmm10

# qhasm: xmm11 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm11=int6464#12
# asm 2: pxor <xmm13=%xmm13,<xmm11=%xmm11
pxor %xmm13,%xmm11

# qhasm: xmm14 ^= xmm11
# asm 1: pxor <xmm11=int6464#12,<xmm14=int6464#15
# asm 2: pxor <xmm11=%xmm11,<xmm14=%xmm14
pxor %xmm11,%xmm14

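# Final AddRoundKey: xor each of the eight bitsliced state words with
# the corresponding word of the round-10 subkey.  The expanded key at c
# holds eight 16-byte bitsliced words per round, so round 10 begins at
# byte offset 10*128 = 1280.
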
# qhasm: xmm8 ^= *(int128 *)(c + 1280)
# asm 1: pxor 1280(<c=int64#5),<xmm8=int6464#9
# asm 2: pxor 1280(<c=%r8),<xmm8=%xmm8
pxor 1280(%r8),%xmm8

# qhasm: xmm9 ^= *(int128 *)(c + 1296)
# asm 1: pxor 1296(<c=int64#5),<xmm9=int6464#10
# asm 2: pxor 1296(<c=%r8),<xmm9=%xmm9
pxor 1296(%r8),%xmm9

# qhasm: xmm12 ^= *(int128 *)(c + 1312)
# asm 1: pxor 1312(<c=int64#5),<xmm12=int6464#13
# asm 2: pxor 1312(<c=%r8),<xmm12=%xmm12
pxor 1312(%r8),%xmm12

# qhasm: xmm14 ^= *(int128 *)(c + 1328)
# asm 1: pxor 1328(<c=int64#5),<xmm14=int6464#15
# asm 2: pxor 1328(<c=%r8),<xmm14=%xmm14
pxor 1328(%r8),%xmm14

# qhasm: xmm11 ^= *(int128 *)(c + 1344)
# asm 1: pxor 1344(<c=int64#5),<xmm11=int6464#12
# asm 2: pxor 1344(<c=%r8),<xmm11=%xmm11
pxor 1344(%r8),%xmm11

# qhasm: xmm15 ^= *(int128 *)(c + 1360)
# asm 1: pxor 1360(<c=int64#5),<xmm15=int6464#16
# asm 2: pxor 1360(<c=%r8),<xmm15=%xmm15
pxor 1360(%r8),%xmm15

# qhasm: xmm10 ^= *(int128 *)(c + 1376)
# asm 1: pxor 1376(<c=int64#5),<xmm10=int6464#11
# asm 2: pxor 1376(<c=%r8),<xmm10=%xmm10
pxor 1376(%r8),%xmm10

# qhasm: xmm13 ^= *(int128 *)(c + 1392)
# asm 1: pxor 1392(<c=int64#5),<xmm13=int6464#14
# asm 2: pxor 1392(<c=%r8),<xmm13=%xmm13
pxor 1392(%r8),%xmm13

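# Un-bitslice the state: three passes of masked swap-and-shift (shift
# amounts 1, 2 and 4 with masks BS0 = 0x55..., BS1 = 0x33..., BS2 =
# 0x0f...) perform a bit-matrix transpose, turning the eight bitsliced
# words in xmm8..xmm15 back into eight consecutive 16-byte keystream
# blocks.
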
# qhasm: xmm0 = xmm10
# asm 1: movdqa <xmm10=int6464#11,>xmm0=int6464#1
# asm 2: movdqa <xmm10=%xmm10,>xmm0=%xmm0
movdqa %xmm10,%xmm0

# qhasm: uint6464 xmm0 >>= 1
# asm 1: psrlq $1,<xmm0=int6464#1
# asm 2: psrlq $1,<xmm0=%xmm0
psrlq $1,%xmm0

# qhasm: xmm0 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
pxor %xmm13,%xmm0

# qhasm: xmm0 &= BS0
# asm 1: pand BS0,<xmm0=int6464#1
# asm 2: pand BS0,<xmm0=%xmm0
pand BS0,%xmm0

# qhasm: xmm13 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14
# asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13
pxor %xmm0,%xmm13

# qhasm: uint6464 xmm0 <<= 1
# asm 1: psllq $1,<xmm0=int6464#1
# asm 2: psllq $1,<xmm0=%xmm0
psllq $1,%xmm0

# qhasm: xmm10 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11
# asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10
pxor %xmm0,%xmm10

# qhasm: xmm0 = xmm11
# asm 1: movdqa <xmm11=int6464#12,>xmm0=int6464#1
# asm 2: movdqa <xmm11=%xmm11,>xmm0=%xmm0
movdqa %xmm11,%xmm0

# qhasm: uint6464 xmm0 >>= 1
# asm 1: psrlq $1,<xmm0=int6464#1
# asm 2: psrlq $1,<xmm0=%xmm0
psrlq $1,%xmm0

# qhasm: xmm0 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm0=int6464#1
# asm 2: pxor <xmm15=%xmm15,<xmm0=%xmm0
pxor %xmm15,%xmm0

# qhasm: xmm0 &= BS0
# asm 1: pand BS0,<xmm0=int6464#1
# asm 2: pand BS0,<xmm0=%xmm0
pand BS0,%xmm0

# qhasm: xmm15 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16
# asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15
pxor %xmm0,%xmm15

# qhasm: uint6464 xmm0 <<= 1
# asm 1: psllq $1,<xmm0=int6464#1
# asm 2: psllq $1,<xmm0=%xmm0
psllq $1,%xmm0

# qhasm: xmm11 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12
# asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11
pxor %xmm0,%xmm11

# qhasm: xmm0 = xmm12
# asm 1: movdqa <xmm12=int6464#13,>xmm0=int6464#1
# asm 2: movdqa <xmm12=%xmm12,>xmm0=%xmm0
movdqa %xmm12,%xmm0

# qhasm: uint6464 xmm0 >>= 1
# asm 1: psrlq $1,<xmm0=int6464#1
# asm 2: psrlq $1,<xmm0=%xmm0
psrlq $1,%xmm0

# qhasm: xmm0 ^= xmm14
# asm 1: pxor <xmm14=int6464#15,<xmm0=int6464#1
# asm 2: pxor <xmm14=%xmm14,<xmm0=%xmm0
pxor %xmm14,%xmm0

# qhasm: xmm0 &= BS0
# asm 1: pand BS0,<xmm0=int6464#1
# asm 2: pand BS0,<xmm0=%xmm0
pand BS0,%xmm0

# qhasm: xmm14 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15
# asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14
pxor %xmm0,%xmm14

# qhasm: uint6464 xmm0 <<= 1
# asm 1: psllq $1,<xmm0=int6464#1
# asm 2: psllq $1,<xmm0=%xmm0
psllq $1,%xmm0

# qhasm: xmm12 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
pxor %xmm0,%xmm12

# qhasm: xmm0 = xmm8
# asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1
# asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0
movdqa %xmm8,%xmm0

# qhasm: uint6464 xmm0 >>= 1
# asm 1: psrlq $1,<xmm0=int6464#1
# asm 2: psrlq $1,<xmm0=%xmm0
psrlq $1,%xmm0

# qhasm: xmm0 ^= xmm9
# asm 1: pxor <xmm9=int6464#10,<xmm0=int6464#1
# asm 2: pxor <xmm9=%xmm9,<xmm0=%xmm0
pxor %xmm9,%xmm0

# qhasm: xmm0 &= BS0
# asm 1: pand BS0,<xmm0=int6464#1
# asm 2: pand BS0,<xmm0=%xmm0
pand BS0,%xmm0

# qhasm: xmm9 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
pxor %xmm0,%xmm9

# qhasm: uint6464 xmm0 <<= 1
# asm 1: psllq $1,<xmm0=int6464#1
# asm 2: psllq $1,<xmm0=%xmm0
psllq $1,%xmm0

# qhasm: xmm8 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
pxor %xmm0,%xmm8

# qhasm: xmm0 = xmm15
# asm 1: movdqa <xmm15=int6464#16,>xmm0=int6464#1
# asm 2: movdqa <xmm15=%xmm15,>xmm0=%xmm0
movdqa %xmm15,%xmm0

# qhasm: uint6464 xmm0 >>= 2
# asm 1: psrlq $2,<xmm0=int6464#1
# asm 2: psrlq $2,<xmm0=%xmm0
psrlq $2,%xmm0

# qhasm: xmm0 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
pxor %xmm13,%xmm0

# qhasm: xmm0 &= BS1
# asm 1: pand BS1,<xmm0=int6464#1
# asm 2: pand BS1,<xmm0=%xmm0
pand BS1,%xmm0

# qhasm: xmm13 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14
# asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13
pxor %xmm0,%xmm13

# qhasm: uint6464 xmm0 <<= 2
# asm 1: psllq $2,<xmm0=int6464#1
# asm 2: psllq $2,<xmm0=%xmm0
psllq $2,%xmm0

# qhasm: xmm15 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16
# asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15
pxor %xmm0,%xmm15

# qhasm: xmm0 = xmm11
# asm 1: movdqa <xmm11=int6464#12,>xmm0=int6464#1
# asm 2: movdqa <xmm11=%xmm11,>xmm0=%xmm0
movdqa %xmm11,%xmm0

# qhasm: uint6464 xmm0 >>= 2
# asm 1: psrlq $2,<xmm0=int6464#1
# asm 2: psrlq $2,<xmm0=%xmm0
psrlq $2,%xmm0

# qhasm: xmm0 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#1
# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm0
pxor %xmm10,%xmm0

# qhasm: xmm0 &= BS1
# asm 1: pand BS1,<xmm0=int6464#1
# asm 2: pand BS1,<xmm0=%xmm0
pand BS1,%xmm0

# qhasm: xmm10 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11
# asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10
pxor %xmm0,%xmm10

# qhasm: uint6464 xmm0 <<= 2
# asm 1: psllq $2,<xmm0=int6464#1
# asm 2: psllq $2,<xmm0=%xmm0
psllq $2,%xmm0

# qhasm: xmm11 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12
# asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11
pxor %xmm0,%xmm11

# qhasm: xmm0 = xmm9
# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#1
# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm0
movdqa %xmm9,%xmm0

# qhasm: uint6464 xmm0 >>= 2
# asm 1: psrlq $2,<xmm0=int6464#1
# asm 2: psrlq $2,<xmm0=%xmm0
psrlq $2,%xmm0

# qhasm: xmm0 ^= xmm14
# asm 1: pxor <xmm14=int6464#15,<xmm0=int6464#1
# asm 2: pxor <xmm14=%xmm14,<xmm0=%xmm0
pxor %xmm14,%xmm0

# qhasm: xmm0 &= BS1
# asm 1: pand BS1,<xmm0=int6464#1
# asm 2: pand BS1,<xmm0=%xmm0
pand BS1,%xmm0

# qhasm: xmm14 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15
# asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14
pxor %xmm0,%xmm14

# qhasm: uint6464 xmm0 <<= 2
# asm 1: psllq $2,<xmm0=int6464#1
# asm 2: psllq $2,<xmm0=%xmm0
psllq $2,%xmm0

# qhasm: xmm9 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
pxor %xmm0,%xmm9

# qhasm: xmm0 = xmm8
# asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1
# asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0
movdqa %xmm8,%xmm0

# qhasm: uint6464 xmm0 >>= 2
# asm 1: psrlq $2,<xmm0=int6464#1
# asm 2: psrlq $2,<xmm0=%xmm0
psrlq $2,%xmm0

# qhasm: xmm0 ^= xmm12
# asm 1: pxor <xmm12=int6464#13,<xmm0=int6464#1
# asm 2: pxor <xmm12=%xmm12,<xmm0=%xmm0
pxor %xmm12,%xmm0

# qhasm: xmm0 &= BS1
# asm 1: pand BS1,<xmm0=int6464#1
# asm 2: pand BS1,<xmm0=%xmm0
pand BS1,%xmm0

# qhasm: xmm12 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
pxor %xmm0,%xmm12

# qhasm: uint6464 xmm0 <<= 2
# asm 1: psllq $2,<xmm0=int6464#1
# asm 2: psllq $2,<xmm0=%xmm0
psllq $2,%xmm0

# qhasm: xmm8 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
pxor %xmm0,%xmm8

# qhasm: xmm0 = xmm14
# asm 1: movdqa <xmm14=int6464#15,>xmm0=int6464#1
# asm 2: movdqa <xmm14=%xmm14,>xmm0=%xmm0
movdqa %xmm14,%xmm0

# qhasm: uint6464 xmm0 >>= 4
# asm 1: psrlq $4,<xmm0=int6464#1
# asm 2: psrlq $4,<xmm0=%xmm0
psrlq $4,%xmm0

# qhasm: xmm0 ^= xmm13
# asm 1: pxor <xmm13=int6464#14,<xmm0=int6464#1
# asm 2: pxor <xmm13=%xmm13,<xmm0=%xmm0
pxor %xmm13,%xmm0

# qhasm: xmm0 &= BS2
# asm 1: pand BS2,<xmm0=int6464#1
# asm 2: pand BS2,<xmm0=%xmm0
pand BS2,%xmm0

# qhasm: xmm13 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm13=int6464#14
# asm 2: pxor <xmm0=%xmm0,<xmm13=%xmm13
pxor %xmm0,%xmm13

# qhasm: uint6464 xmm0 <<= 4
# asm 1: psllq $4,<xmm0=int6464#1
# asm 2: psllq $4,<xmm0=%xmm0
psllq $4,%xmm0

# qhasm: xmm14 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm14=int6464#15
# asm 2: pxor <xmm0=%xmm0,<xmm14=%xmm14
pxor %xmm0,%xmm14

# qhasm: xmm0 = xmm12
# asm 1: movdqa <xmm12=int6464#13,>xmm0=int6464#1
# asm 2: movdqa <xmm12=%xmm12,>xmm0=%xmm0
movdqa %xmm12,%xmm0

# qhasm: uint6464 xmm0 >>= 4
# asm 1: psrlq $4,<xmm0=int6464#1
# asm 2: psrlq $4,<xmm0=%xmm0
psrlq $4,%xmm0

# qhasm: xmm0 ^= xmm10
# asm 1: pxor <xmm10=int6464#11,<xmm0=int6464#1
# asm 2: pxor <xmm10=%xmm10,<xmm0=%xmm0
pxor %xmm10,%xmm0

# qhasm: xmm0 &= BS2
# asm 1: pand BS2,<xmm0=int6464#1
# asm 2: pand BS2,<xmm0=%xmm0
pand BS2,%xmm0

# qhasm: xmm10 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm10=int6464#11
# asm 2: pxor <xmm0=%xmm0,<xmm10=%xmm10
pxor %xmm0,%xmm10

# qhasm: uint6464 xmm0 <<= 4
# asm 1: psllq $4,<xmm0=int6464#1
# asm 2: psllq $4,<xmm0=%xmm0
psllq $4,%xmm0

# qhasm: xmm12 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm12=int6464#13
# asm 2: pxor <xmm0=%xmm0,<xmm12=%xmm12
pxor %xmm0,%xmm12

# qhasm: xmm0 = xmm9
# asm 1: movdqa <xmm9=int6464#10,>xmm0=int6464#1
# asm 2: movdqa <xmm9=%xmm9,>xmm0=%xmm0
movdqa %xmm9,%xmm0

# qhasm: uint6464 xmm0 >>= 4
# asm 1: psrlq $4,<xmm0=int6464#1
# asm 2: psrlq $4,<xmm0=%xmm0
psrlq $4,%xmm0

# qhasm: xmm0 ^= xmm15
# asm 1: pxor <xmm15=int6464#16,<xmm0=int6464#1
# asm 2: pxor <xmm15=%xmm15,<xmm0=%xmm0
pxor %xmm15,%xmm0

# qhasm: xmm0 &= BS2
# asm 1: pand BS2,<xmm0=int6464#1
# asm 2: pand BS2,<xmm0=%xmm0
pand BS2,%xmm0

# qhasm: xmm15 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm15=int6464#16
# asm 2: pxor <xmm0=%xmm0,<xmm15=%xmm15
pxor %xmm0,%xmm15

# qhasm: uint6464 xmm0 <<= 4
# asm 1: psllq $4,<xmm0=int6464#1
# asm 2: psllq $4,<xmm0=%xmm0
psllq $4,%xmm0

# qhasm: xmm9 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm9=int6464#10
# asm 2: pxor <xmm0=%xmm0,<xmm9=%xmm9
pxor %xmm0,%xmm9

# qhasm: xmm0 = xmm8
# asm 1: movdqa <xmm8=int6464#9,>xmm0=int6464#1
# asm 2: movdqa <xmm8=%xmm8,>xmm0=%xmm0
movdqa %xmm8,%xmm0

# qhasm: uint6464 xmm0 >>= 4
# asm 1: psrlq $4,<xmm0=int6464#1
# asm 2: psrlq $4,<xmm0=%xmm0
psrlq $4,%xmm0

# qhasm: xmm0 ^= xmm11
# asm 1: pxor <xmm11=int6464#12,<xmm0=int6464#1
# asm 2: pxor <xmm11=%xmm11,<xmm0=%xmm0
pxor %xmm11,%xmm0

# qhasm: xmm0 &= BS2
# asm 1: pand BS2,<xmm0=int6464#1
# asm 2: pand BS2,<xmm0=%xmm0
pand BS2,%xmm0

# qhasm: xmm11 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm11=int6464#12
# asm 2: pxor <xmm0=%xmm0,<xmm11=%xmm11
pxor %xmm0,%xmm11

# qhasm: uint6464 xmm0 <<= 4
# asm 1: psllq $4,<xmm0=int6464#1
# asm 2: psllq $4,<xmm0=%xmm0
psllq $4,%xmm0

# qhasm: xmm8 ^= xmm0
# asm 1: pxor <xmm0=int6464#1,<xmm8=int6464#9
# asm 2: pxor <xmm0=%xmm0,<xmm8=%xmm8
pxor %xmm0,%xmm8

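# Dispatch on the remaining length: fewer than 128 bytes goes to the
# partial-block tail, exactly 128 bytes to the final full iteration,
# and more than 128 bytes falls through to consume this block and loop
# back to ._enc_block.
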
# qhasm: unsigned<? =? len-128
# asm 1: cmp $128,<len=int64#3
# asm 2: cmp $128,<len=%rdx
cmp $128,%rdx
# comment:fp stack unchanged by jump

# qhasm: goto partial if unsigned<
jb ._partial
# comment:fp stack unchanged by jump

# qhasm: goto full if =
je ._full

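# Advance the counter for the next iteration: the last 4 bytes of the
# nonce/counter block at np+12 hold a big-endian 32-bit counter, so it
# is byte-swapped to native order, incremented by 8 (eight blocks were
# just generated), swapped back, and stored.
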
# qhasm: tmp = *(uint32 *)(np + 12)
# asm 1: movl 12(<np=int64#4),>tmp=int64#6d
# asm 2: movl 12(<np=%rcx),>tmp=%r9d
movl 12(%rcx),%r9d

# qhasm: (uint32) bswap tmp
# asm 1: bswap <tmp=int64#6d
# asm 2: bswap <tmp=%r9d
bswap %r9d

# qhasm: tmp += 8
# asm 1: add $8,<tmp=int64#6
# asm 2: add $8,<tmp=%r9
add $8,%r9

# qhasm: (uint32) bswap tmp
# asm 1: bswap <tmp=int64#6d
# asm 2: bswap <tmp=%r9d
bswap %r9d

# qhasm: *(uint32 *)(np + 12) = tmp
# asm 1: movl <tmp=int64#6d,12(<np=int64#4)
# asm 2: movl <tmp=%r9d,12(<np=%rcx)
movl %r9d,12(%rcx)

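# Xor the 128 bytes of keystream with the input and store the result.
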
# qhasm: xmm8 ^= *(int128 *)(inp + 0)
# asm 1: pxor 0(<inp=int64#2),<xmm8=int6464#9
# asm 2: pxor 0(<inp=%rsi),<xmm8=%xmm8
pxor 0(%rsi),%xmm8

# qhasm: xmm9 ^= *(int128 *)(inp + 16)
# asm 1: pxor 16(<inp=int64#2),<xmm9=int6464#10
# asm 2: pxor 16(<inp=%rsi),<xmm9=%xmm9
pxor 16(%rsi),%xmm9

# qhasm: xmm12 ^= *(int128 *)(inp + 32)
# asm 1: pxor 32(<inp=int64#2),<xmm12=int6464#13
# asm 2: pxor 32(<inp=%rsi),<xmm12=%xmm12
pxor 32(%rsi),%xmm12

# qhasm: xmm14 ^= *(int128 *)(inp + 48)
# asm 1: pxor 48(<inp=int64#2),<xmm14=int6464#15
# asm 2: pxor 48(<inp=%rsi),<xmm14=%xmm14
pxor 48(%rsi),%xmm14

# qhasm: xmm11 ^= *(int128 *)(inp + 64)
# asm 1: pxor 64(<inp=int64#2),<xmm11=int6464#12
# asm 2: pxor 64(<inp=%rsi),<xmm11=%xmm11
pxor 64(%rsi),%xmm11

# qhasm: xmm15 ^= *(int128 *)(inp + 80)
# asm 1: pxor 80(<inp=int64#2),<xmm15=int6464#16
# asm 2: pxor 80(<inp=%rsi),<xmm15=%xmm15
pxor 80(%rsi),%xmm15

# qhasm: xmm10 ^= *(int128 *)(inp + 96)
# asm 1: pxor 96(<inp=int64#2),<xmm10=int6464#11
# asm 2: pxor 96(<inp=%rsi),<xmm10=%xmm10
pxor 96(%rsi),%xmm10

# qhasm: xmm13 ^= *(int128 *)(inp + 112)
# asm 1: pxor 112(<inp=int64#2),<xmm13=int6464#14
# asm 2: pxor 112(<inp=%rsi),<xmm13=%xmm13
pxor 112(%rsi),%xmm13

# qhasm: *(int128 *) (outp + 0) = xmm8
# asm 1: movdqa <xmm8=int6464#9,0(<outp=int64#1)
# asm 2: movdqa <xmm8=%xmm8,0(<outp=%rdi)
movdqa %xmm8,0(%rdi)

# qhasm: *(int128 *) (outp + 16) = xmm9
# asm 1: movdqa <xmm9=int6464#10,16(<outp=int64#1)
# asm 2: movdqa <xmm9=%xmm9,16(<outp=%rdi)
movdqa %xmm9,16(%rdi)

# qhasm: *(int128 *) (outp + 32) = xmm12
# asm 1: movdqa <xmm12=int6464#13,32(<outp=int64#1)
# asm 2: movdqa <xmm12=%xmm12,32(<outp=%rdi)
movdqa %xmm12,32(%rdi)

# qhasm: *(int128 *) (outp + 48) = xmm14
# asm 1: movdqa <xmm14=int6464#15,48(<outp=int64#1)
# asm 2: movdqa <xmm14=%xmm14,48(<outp=%rdi)
movdqa %xmm14,48(%rdi)

# qhasm: *(int128 *) (outp + 64) = xmm11
# asm 1: movdqa <xmm11=int6464#12,64(<outp=int64#1)
# asm 2: movdqa <xmm11=%xmm11,64(<outp=%rdi)
movdqa %xmm11,64(%rdi)

# qhasm: *(int128 *) (outp + 80) = xmm15
# asm 1: movdqa <xmm15=int6464#16,80(<outp=int64#1)
# asm 2: movdqa <xmm15=%xmm15,80(<outp=%rdi)
movdqa %xmm15,80(%rdi)

# qhasm: *(int128 *) (outp + 96) = xmm10
# asm 1: movdqa <xmm10=int6464#11,96(<outp=int64#1)
# asm 2: movdqa <xmm10=%xmm10,96(<outp=%rdi)
movdqa %xmm10,96(%rdi)

# qhasm: *(int128 *) (outp + 112) = xmm13
# asm 1: movdqa <xmm13=int6464#14,112(<outp=int64#1)
# asm 2: movdqa <xmm13=%xmm13,112(<outp=%rdi)
movdqa %xmm13,112(%rdi)

# qhasm: len -= 128
# asm 1: sub $128,<len=int64#3
# asm 2: sub $128,<len=%rdx
sub $128,%rdx

# qhasm: inp += 128
# asm 1: add $128,<inp=int64#2
# asm 2: add $128,<inp=%rsi
add $128,%rsi

# qhasm: outp += 128
# asm 1: add $128,<outp=int64#1
# asm 2: add $128,<outp=%rdi
add $128,%rdi
# comment:fp stack unchanged by jump

# qhasm: goto enc_block
jmp ._enc_block

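# Partial tail: fewer than 128 bytes remain.  Save the byte count in
# lensav, advance the counter by the number of 16-byte blocks consumed
# (len >> 4), spill the keystream to the stack buffer bl, then xor it
# into the message one byte at a time.
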
# qhasm: partial:
._partial:

# qhasm: lensav = len
# asm 1: mov <len=int64#3,>lensav=int64#5
# asm 2: mov <len=%rdx,>lensav=%r8
mov %rdx,%r8

# qhasm: (uint32) len >>= 4
# asm 1: shr $4,<len=int64#3d
# asm 2: shr $4,<len=%edx
shr $4,%edx

# qhasm: tmp = *(uint32 *)(np + 12)
# asm 1: movl 12(<np=int64#4),>tmp=int64#6d
# asm 2: movl 12(<np=%rcx),>tmp=%r9d
movl 12(%rcx),%r9d

# qhasm: (uint32) bswap tmp
# asm 1: bswap <tmp=int64#6d
# asm 2: bswap <tmp=%r9d
bswap %r9d

# qhasm: tmp += len
# asm 1: add <len=int64#3,<tmp=int64#6
# asm 2: add <len=%rdx,<tmp=%r9
add %rdx,%r9

# qhasm: (uint32) bswap tmp
# asm 1: bswap <tmp=int64#6d
# asm 2: bswap <tmp=%r9d
bswap %r9d

# qhasm: *(uint32 *)(np + 12) = tmp
# asm 1: movl <tmp=int64#6d,12(<np=int64#4)
# asm 2: movl <tmp=%r9d,12(<np=%rcx)
movl %r9d,12(%rcx)

# qhasm: blp = &bl
# asm 1: leaq <bl=stack1024#1,>blp=int64#3
# asm 2: leaq <bl=32(%rsp),>blp=%rdx
leaq 32(%rsp),%rdx

# qhasm: *(int128 *)(blp + 0) = xmm8
# asm 1: movdqa <xmm8=int6464#9,0(<blp=int64#3)
# asm 2: movdqa <xmm8=%xmm8,0(<blp=%rdx)
movdqa %xmm8,0(%rdx)

# qhasm: *(int128 *)(blp + 16) = xmm9
# asm 1: movdqa <xmm9=int6464#10,16(<blp=int64#3)
# asm 2: movdqa <xmm9=%xmm9,16(<blp=%rdx)
movdqa %xmm9,16(%rdx)

# qhasm: *(int128 *)(blp + 32) = xmm12
# asm 1: movdqa <xmm12=int6464#13,32(<blp=int64#3)
# asm 2: movdqa <xmm12=%xmm12,32(<blp=%rdx)
movdqa %xmm12,32(%rdx)

# qhasm: *(int128 *)(blp + 48) = xmm14
# asm 1: movdqa <xmm14=int6464#15,48(<blp=int64#3)
# asm 2: movdqa <xmm14=%xmm14,48(<blp=%rdx)
movdqa %xmm14,48(%rdx)

# qhasm: *(int128 *)(blp + 64) = xmm11
# asm 1: movdqa <xmm11=int6464#12,64(<blp=int64#3)
# asm 2: movdqa <xmm11=%xmm11,64(<blp=%rdx)
movdqa %xmm11,64(%rdx)

# qhasm: *(int128 *)(blp + 80) = xmm15
# asm 1: movdqa <xmm15=int6464#16,80(<blp=int64#3)
# asm 2: movdqa <xmm15=%xmm15,80(<blp=%rdx)
movdqa %xmm15,80(%rdx)

# qhasm: *(int128 *)(blp + 96) = xmm10
# asm 1: movdqa <xmm10=int6464#11,96(<blp=int64#3)
# asm 2: movdqa <xmm10=%xmm10,96(<blp=%rdx)
movdqa %xmm10,96(%rdx)

# qhasm: *(int128 *)(blp + 112) = xmm13
# asm 1: movdqa <xmm13=int6464#14,112(<blp=int64#3)
# asm 2: movdqa <xmm13=%xmm13,112(<blp=%rdx)
movdqa %xmm13,112(%rdx)

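# Byte loop: xor one keystream byte from the stack buffer into one
# input byte until lensav reaches zero.
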
# qhasm: bytes:
._bytes:

# qhasm: =? lensav-0
# asm 1: cmp $0,<lensav=int64#5
# asm 2: cmp $0,<lensav=%r8
cmp $0,%r8
# comment:fp stack unchanged by jump

# qhasm: goto end if =
je ._end

# qhasm: b = *(uint8 *)(blp + 0)
# asm 1: movzbq 0(<blp=int64#3),>b=int64#4
# asm 2: movzbq 0(<blp=%rdx),>b=%rcx
movzbq 0(%rdx),%rcx

# qhasm: (uint8) b ^= *(uint8 *)(inp + 0)
# asm 1: xorb 0(<inp=int64#2),<b=int64#4b
# asm 2: xorb 0(<inp=%rsi),<b=%cl
xorb 0(%rsi),%cl

# qhasm: *(uint8 *)(outp + 0) = b
# asm 1: movb <b=int64#4b,0(<outp=int64#1)
# asm 2: movb <b=%cl,0(<outp=%rdi)
movb %cl,0(%rdi)

# qhasm: blp += 1
# asm 1: add $1,<blp=int64#3
# asm 2: add $1,<blp=%rdx
add $1,%rdx

# qhasm: inp +=1
# asm 1: add $1,<inp=int64#2
# asm 2: add $1,<inp=%rsi
add $1,%rsi

# qhasm: outp +=1
# asm 1: add $1,<outp=int64#1
# asm 2: add $1,<outp=%rdi
add $1,%rdi

# qhasm: lensav -= 1
# asm 1: sub $1,<lensav=int64#5
# asm 2: sub $1,<lensav=%r8
sub $1,%r8
# comment:fp stack unchanged by jump

# qhasm: goto bytes
jmp ._bytes

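# Exactly 128 bytes remain: bump the counter one last time, xor the
# full eight blocks, write them out, and fall through to the end.
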
# qhasm: full:
._full:

# qhasm: tmp = *(uint32 *)(np + 12)
# asm 1: movl 12(<np=int64#4),>tmp=int64#3d
# asm 2: movl 12(<np=%rcx),>tmp=%edx
movl 12(%rcx),%edx

# qhasm: (uint32) bswap tmp
# asm 1: bswap <tmp=int64#3d
# asm 2: bswap <tmp=%edx
bswap %edx

# qhasm: tmp += 8
# asm 1: add $8,<tmp=int64#3
# asm 2: add $8,<tmp=%rdx
add $8,%rdx

# qhasm: (uint32) bswap tmp
# asm 1: bswap <tmp=int64#3d
# asm 2: bswap <tmp=%edx
bswap %edx

# qhasm: *(uint32 *)(np + 12) = tmp
# asm 1: movl <tmp=int64#3d,12(<np=int64#4)
# asm 2: movl <tmp=%edx,12(<np=%rcx)
movl %edx,12(%rcx)

# qhasm: xmm8 ^= *(int128 *)(inp + 0)
# asm 1: pxor 0(<inp=int64#2),<xmm8=int6464#9
# asm 2: pxor 0(<inp=%rsi),<xmm8=%xmm8
pxor 0(%rsi),%xmm8

# qhasm: xmm9 ^= *(int128 *)(inp + 16)
# asm 1: pxor 16(<inp=int64#2),<xmm9=int6464#10
# asm 2: pxor 16(<inp=%rsi),<xmm9=%xmm9
pxor 16(%rsi),%xmm9

# qhasm: xmm12 ^= *(int128 *)(inp + 32)
# asm 1: pxor 32(<inp=int64#2),<xmm12=int6464#13
# asm 2: pxor 32(<inp=%rsi),<xmm12=%xmm12
pxor 32(%rsi),%xmm12

# qhasm: xmm14 ^= *(int128 *)(inp + 48)
# asm 1: pxor 48(<inp=int64#2),<xmm14=int6464#15
# asm 2: pxor 48(<inp=%rsi),<xmm14=%xmm14
pxor 48(%rsi),%xmm14

# qhasm: xmm11 ^= *(int128 *)(inp + 64)
# asm 1: pxor 64(<inp=int64#2),<xmm11=int6464#12
# asm 2: pxor 64(<inp=%rsi),<xmm11=%xmm11
pxor 64(%rsi),%xmm11

# qhasm: xmm15 ^= *(int128 *)(inp + 80)
# asm 1: pxor 80(<inp=int64#2),<xmm15=int6464#16
# asm 2: pxor 80(<inp=%rsi),<xmm15=%xmm15
pxor 80(%rsi),%xmm15

# qhasm: xmm10 ^= *(int128 *)(inp + 96)
# asm 1: pxor 96(<inp=int64#2),<xmm10=int6464#11
# asm 2: pxor 96(<inp=%rsi),<xmm10=%xmm10
pxor 96(%rsi),%xmm10

# qhasm: xmm13 ^= *(int128 *)(inp + 112)
# asm 1: pxor 112(<inp=int64#2),<xmm13=int6464#14
# asm 2: pxor 112(<inp=%rsi),<xmm13=%xmm13
pxor 112(%rsi),%xmm13

# qhasm: *(int128 *) (outp + 0) = xmm8
# asm 1: movdqa <xmm8=int6464#9,0(<outp=int64#1)
# asm 2: movdqa <xmm8=%xmm8,0(<outp=%rdi)
movdqa %xmm8,0(%rdi)

# qhasm: *(int128 *) (outp + 16) = xmm9
# asm 1: movdqa <xmm9=int6464#10,16(<outp=int64#1)
# asm 2: movdqa <xmm9=%xmm9,16(<outp=%rdi)
movdqa %xmm9,16(%rdi)

# qhasm: *(int128 *) (outp + 32) = xmm12
# asm 1: movdqa <xmm12=int6464#13,32(<outp=int64#1)
# asm 2: movdqa <xmm12=%xmm12,32(<outp=%rdi)
movdqa %xmm12,32(%rdi)

# qhasm: *(int128 *) (outp + 48) = xmm14
# asm 1: movdqa <xmm14=int6464#15,48(<outp=int64#1)
# asm 2: movdqa <xmm14=%xmm14,48(<outp=%rdi)
movdqa %xmm14,48(%rdi)

# qhasm: *(int128 *) (outp + 64) = xmm11
# asm 1: movdqa <xmm11=int6464#12,64(<outp=int64#1)
# asm 2: movdqa <xmm11=%xmm11,64(<outp=%rdi)
movdqa %xmm11,64(%rdi)

# qhasm: *(int128 *) (outp + 80) = xmm15
# asm 1: movdqa <xmm15=int6464#16,80(<outp=int64#1)
# asm 2: movdqa <xmm15=%xmm15,80(<outp=%rdi)
movdqa %xmm15,80(%rdi)

# qhasm: *(int128 *) (outp + 96) = xmm10
# asm 1: movdqa <xmm10=int6464#11,96(<outp=int64#1)
# asm 2: movdqa <xmm10=%xmm10,96(<outp=%rdi)
movdqa %xmm10,96(%rdi)

# qhasm: *(int128 *) (outp + 112) = xmm13
# asm 1: movdqa <xmm13=int6464#14,112(<outp=int64#1)
# asm 2: movdqa <xmm13=%xmm13,112(<outp=%rdi)
movdqa %xmm13,112(%rdi)
# comment:fp stack unchanged by fallthrough

# qhasm: end:
._end:

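# Epilogue: undo the stack adjustment saved in %r11 and return 0.
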
# qhasm: leave
add %r11,%rsp
mov %rdi,%rax
mov %rsi,%rdx
xor %rax,%rax
ret